vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "vercel",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true
  21    },
  22    {
  23      "id": "anthropic/claude-3.5-haiku",
  24      "name": "Claude 3.5 Haiku",
  25      "cost_per_1m_in": 0.8,
  26      "cost_per_1m_out": 4,
  27      "cost_per_1m_in_cached": 0.08,
  28      "cost_per_1m_out_cached": 1,
  29      "context_window": 200000,
  30      "default_max_tokens": 8000,
  31      "can_reason": false,
  32      "supports_attachments": true
  33    },
  34    {
  35      "id": "anthropic/claude-3.7-sonnet",
  36      "name": "Claude 3.7 Sonnet",
  37      "cost_per_1m_in": 3,
  38      "cost_per_1m_out": 15,
  39      "cost_per_1m_in_cached": 0.3,
  40      "cost_per_1m_out_cached": 3.75,
  41      "context_window": 200000,
  42      "default_max_tokens": 8000,
  43      "can_reason": true,
  44      "reasoning_levels": [
  45        "none",
  46        "minimal",
  47        "low",
  48        "medium",
  49        "high",
  50        "xhigh"
  51      ],
  52      "default_reasoning_effort": "medium",
  53      "supports_attachments": true
  54    },
  55    {
  56      "id": "anthropic/claude-haiku-4.5",
  57      "name": "Claude Haiku 4.5",
  58      "cost_per_1m_in": 1,
  59      "cost_per_1m_out": 5,
  60      "cost_per_1m_in_cached": 0.1,
  61      "cost_per_1m_out_cached": 1.25,
  62      "context_window": 200000,
  63      "default_max_tokens": 8000,
  64      "can_reason": true,
  65      "reasoning_levels": [
  66        "none",
  67        "minimal",
  68        "low",
  69        "medium",
  70        "high",
  71        "xhigh"
  72      ],
  73      "default_reasoning_effort": "medium",
  74      "supports_attachments": true
  75    },
  76    {
  77      "id": "anthropic/claude-opus-4",
  78      "name": "Claude Opus 4",
  79      "cost_per_1m_in": 15,
  80      "cost_per_1m_out": 75,
  81      "cost_per_1m_in_cached": 1.5,
  82      "cost_per_1m_out_cached": 18.75,
  83      "context_window": 200000,
  84      "default_max_tokens": 8000,
  85      "can_reason": true,
  86      "reasoning_levels": [
  87        "none",
  88        "minimal",
  89        "low",
  90        "medium",
  91        "high",
  92        "xhigh"
  93      ],
  94      "default_reasoning_effort": "medium",
  95      "supports_attachments": true
  96    },
  97    {
  98      "id": "anthropic/claude-opus-4.1",
  99      "name": "Claude Opus 4.1",
 100      "cost_per_1m_in": 15,
 101      "cost_per_1m_out": 75,
 102      "cost_per_1m_in_cached": 1.5,
 103      "cost_per_1m_out_cached": 18.75,
 104      "context_window": 200000,
 105      "default_max_tokens": 8000,
 106      "can_reason": true,
 107      "reasoning_levels": [
 108        "none",
 109        "minimal",
 110        "low",
 111        "medium",
 112        "high",
 113        "xhigh"
 114      ],
 115      "default_reasoning_effort": "medium",
 116      "supports_attachments": true
 117    },
 118    {
 119      "id": "anthropic/claude-opus-4.5",
 120      "name": "Claude Opus 4.5",
 121      "cost_per_1m_in": 5,
 122      "cost_per_1m_out": 25,
 123      "cost_per_1m_in_cached": 0.5,
 124      "cost_per_1m_out_cached": 6.25,
 125      "context_window": 200000,
 126      "default_max_tokens": 8000,
 127      "can_reason": true,
 128      "reasoning_levels": [
 129        "none",
 130        "minimal",
 131        "low",
 132        "medium",
 133        "high",
 134        "xhigh"
 135      ],
 136      "default_reasoning_effort": "medium",
 137      "supports_attachments": true
 138    },
 139    {
 140      "id": "anthropic/claude-opus-4.6",
 141      "name": "Claude Opus 4.6",
 142      "cost_per_1m_in": 5,
 143      "cost_per_1m_out": 25,
 144      "cost_per_1m_in_cached": 0.5,
 145      "cost_per_1m_out_cached": 6.25,
 146      "context_window": 1000000,
 147      "default_max_tokens": 8000,
 148      "can_reason": true,
 149      "reasoning_levels": [
 150        "none",
 151        "minimal",
 152        "low",
 153        "medium",
 154        "high",
 155        "xhigh"
 156      ],
 157      "default_reasoning_effort": "medium",
 158      "supports_attachments": true
 159    },
 160    {
 161      "id": "anthropic/claude-opus-4.7",
 162      "name": "Claude Opus 4.7",
 163      "cost_per_1m_in": 5,
 164      "cost_per_1m_out": 25,
 165      "cost_per_1m_in_cached": 0.5,
 166      "cost_per_1m_out_cached": 6.25,
 167      "context_window": 1000000,
 168      "default_max_tokens": 8000,
 169      "can_reason": true,
 170      "reasoning_levels": [
 171        "none",
 172        "minimal",
 173        "low",
 174        "medium",
 175        "high",
 176        "xhigh"
 177      ],
 178      "default_reasoning_effort": "medium",
 179      "supports_attachments": true
 180    },
 181    {
 182      "id": "anthropic/claude-sonnet-4",
 183      "name": "Claude Sonnet 4",
 184      "cost_per_1m_in": 3,
 185      "cost_per_1m_out": 15,
 186      "cost_per_1m_in_cached": 0.3,
 187      "cost_per_1m_out_cached": 3.75,
 188      "context_window": 1000000,
 189      "default_max_tokens": 8000,
 190      "can_reason": true,
 191      "reasoning_levels": [
 192        "none",
 193        "minimal",
 194        "low",
 195        "medium",
 196        "high",
 197        "xhigh"
 198      ],
 199      "default_reasoning_effort": "medium",
 200      "supports_attachments": true
 201    },
 202    {
 203      "id": "anthropic/claude-sonnet-4.5",
 204      "name": "Claude Sonnet 4.5",
 205      "cost_per_1m_in": 3,
 206      "cost_per_1m_out": 15,
 207      "cost_per_1m_in_cached": 0.3,
 208      "cost_per_1m_out_cached": 3.75,
 209      "context_window": 1000000,
 210      "default_max_tokens": 8000,
 211      "can_reason": true,
 212      "reasoning_levels": [
 213        "none",
 214        "minimal",
 215        "low",
 216        "medium",
 217        "high",
 218        "xhigh"
 219      ],
 220      "default_reasoning_effort": "medium",
 221      "supports_attachments": true
 222    },
 223    {
 224      "id": "anthropic/claude-sonnet-4.6",
 225      "name": "Claude Sonnet 4.6",
 226      "cost_per_1m_in": 3,
 227      "cost_per_1m_out": 15,
 228      "cost_per_1m_in_cached": 0.3,
 229      "cost_per_1m_out_cached": 3.75,
 230      "context_window": 1000000,
 231      "default_max_tokens": 8000,
 232      "can_reason": true,
 233      "reasoning_levels": [
 234        "none",
 235        "minimal",
 236        "low",
 237        "medium",
 238        "high",
 239        "xhigh"
 240      ],
 241      "default_reasoning_effort": "medium",
 242      "supports_attachments": true
 243    },
 244    {
 245      "id": "cohere/command-a",
 246      "name": "Command A",
 247      "cost_per_1m_in": 2.5,
 248      "cost_per_1m_out": 10,
 249      "cost_per_1m_in_cached": 0,
 250      "cost_per_1m_out_cached": 0,
 251      "context_window": 256000,
 252      "default_max_tokens": 8000,
 253      "can_reason": false,
 254      "supports_attachments": false
 255    },
 256    {
 257      "id": "deepseek/deepseek-v3",
 258      "name": "DeepSeek V3 0324",
 259      "cost_per_1m_in": 0.77,
 260      "cost_per_1m_out": 0.77,
 261      "cost_per_1m_in_cached": 0,
 262      "cost_per_1m_out_cached": 0,
 263      "context_window": 163840,
 264      "default_max_tokens": 8000,
 265      "can_reason": false,
 266      "supports_attachments": false
 267    },
 268    {
 269      "id": "deepseek/deepseek-v3.1-terminus",
 270      "name": "DeepSeek V3.1 Terminus",
 271      "cost_per_1m_in": 0.27,
 272      "cost_per_1m_out": 1,
 273      "cost_per_1m_in_cached": 0.135,
 274      "cost_per_1m_out_cached": 0,
 275      "context_window": 131072,
 276      "default_max_tokens": 8000,
 277      "can_reason": true,
 278      "reasoning_levels": [
 279        "low",
 280        "medium",
 281        "high"
 282      ],
 283      "default_reasoning_effort": "medium",
 284      "supports_attachments": false
 285    },
 286    {
 287      "id": "deepseek/deepseek-v3.2",
 288      "name": "DeepSeek V3.2",
 289      "cost_per_1m_in": 0.28,
 290      "cost_per_1m_out": 0.42,
 291      "cost_per_1m_in_cached": 0.028,
 292      "cost_per_1m_out_cached": 0,
 293      "context_window": 128000,
 294      "default_max_tokens": 8000,
 295      "can_reason": false,
 296      "supports_attachments": false
 297    },
 298    {
 299      "id": "deepseek/deepseek-v3.2-thinking",
 300      "name": "DeepSeek V3.2 Thinking",
 301      "cost_per_1m_in": 0.28,
 302      "cost_per_1m_out": 0.42,
 303      "cost_per_1m_in_cached": 0.028,
 304      "cost_per_1m_out_cached": 0,
 305      "context_window": 128000,
 306      "default_max_tokens": 8000,
 307      "can_reason": true,
 308      "reasoning_levels": [
 309        "low",
 310        "medium",
 311        "high"
 312      ],
 313      "default_reasoning_effort": "medium",
 314      "supports_attachments": false
 315    },
 316    {
 317      "id": "deepseek/deepseek-v4-flash",
 318      "name": "DeepSeek V4 Flash",
 319      "cost_per_1m_in": 0.14,
 320      "cost_per_1m_out": 0.28,
 321      "cost_per_1m_in_cached": 0.028,
 322      "cost_per_1m_out_cached": 0,
 323      "context_window": 1000000,
 324      "default_max_tokens": 8000,
 325      "can_reason": true,
 326      "reasoning_levels": [
 327        "low",
 328        "medium",
 329        "high"
 330      ],
 331      "default_reasoning_effort": "medium",
 332      "supports_attachments": false
 333    },
 334    {
 335      "id": "deepseek/deepseek-v4-pro",
 336      "name": "DeepSeek V4 Pro",
 337      "cost_per_1m_in": 1.74,
 338      "cost_per_1m_out": 3.48,
 339      "cost_per_1m_in_cached": 0.145,
 340      "cost_per_1m_out_cached": 0,
 341      "context_window": 1000000,
 342      "default_max_tokens": 8000,
 343      "can_reason": true,
 344      "reasoning_levels": [
 345        "low",
 346        "medium",
 347        "high"
 348      ],
 349      "default_reasoning_effort": "medium",
 350      "supports_attachments": false
 351    },
 352    {
 353      "id": "deepseek/deepseek-r1",
 354      "name": "DeepSeek-R1",
 355      "cost_per_1m_in": 1.35,
 356      "cost_per_1m_out": 5.4,
 357      "cost_per_1m_in_cached": 0,
 358      "cost_per_1m_out_cached": 0,
 359      "context_window": 128000,
 360      "default_max_tokens": 8000,
 361      "can_reason": true,
 362      "reasoning_levels": [
 363        "low",
 364        "medium",
 365        "high"
 366      ],
 367      "default_reasoning_effort": "medium",
 368      "supports_attachments": false
 369    },
 370    {
 371      "id": "deepseek/deepseek-v3.1",
 372      "name": "DeepSeek-V3.1",
 373      "cost_per_1m_in": 0.56,
 374      "cost_per_1m_out": 1.68,
 375      "cost_per_1m_in_cached": 0.28,
 376      "cost_per_1m_out_cached": 0,
 377      "context_window": 163840,
 378      "default_max_tokens": 8000,
 379      "can_reason": true,
 380      "reasoning_levels": [
 381        "low",
 382        "medium",
 383        "high"
 384      ],
 385      "default_reasoning_effort": "medium",
 386      "supports_attachments": false
 387    },
 388    {
 389      "id": "mistral/devstral-2",
 390      "name": "Devstral 2",
 391      "cost_per_1m_in": 0.4,
 392      "cost_per_1m_out": 2,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 256000,
 396      "default_max_tokens": 8000,
 397      "can_reason": false,
 398      "supports_attachments": false
 399    },
 400    {
 401      "id": "mistral/devstral-small",
 402      "name": "Devstral Small 1.1",
 403      "cost_per_1m_in": 0.1,
 404      "cost_per_1m_out": 0.3,
 405      "cost_per_1m_in_cached": 0,
 406      "cost_per_1m_out_cached": 0,
 407      "context_window": 128000,
 408      "default_max_tokens": 8000,
 409      "can_reason": false,
 410      "supports_attachments": false
 411    },
 412    {
 413      "id": "mistral/devstral-small-2",
 414      "name": "Devstral Small 2",
 415      "cost_per_1m_in": 0.1,
 416      "cost_per_1m_out": 0.3,
 417      "cost_per_1m_in_cached": 0,
 418      "cost_per_1m_out_cached": 0,
 419      "context_window": 256000,
 420      "default_max_tokens": 8000,
 421      "can_reason": false,
 422      "supports_attachments": false
 423    },
 424    {
 425      "id": "zai/glm-4.5-air",
 426      "name": "GLM 4.5 Air",
 427      "cost_per_1m_in": 0.2,
 428      "cost_per_1m_out": 1.1,
 429      "cost_per_1m_in_cached": 0.03,
 430      "cost_per_1m_out_cached": 0,
 431      "context_window": 128000,
 432      "default_max_tokens": 8000,
 433      "can_reason": true,
 434      "reasoning_levels": [
 435        "low",
 436        "medium",
 437        "high"
 438      ],
 439      "default_reasoning_effort": "medium",
 440      "supports_attachments": false
 441    },
 442    {
 443      "id": "zai/glm-4.5v",
 444      "name": "GLM 4.5V",
 445      "cost_per_1m_in": 0.6,
 446      "cost_per_1m_out": 1.8,
 447      "cost_per_1m_in_cached": 0.11,
 448      "cost_per_1m_out_cached": 0,
 449      "context_window": 66000,
 450      "default_max_tokens": 8000,
 451      "can_reason": false,
 452      "supports_attachments": true
 453    },
 454    {
 455      "id": "zai/glm-4.6",
 456      "name": "GLM 4.6",
 457      "cost_per_1m_in": 0.6,
 458      "cost_per_1m_out": 2.2,
 459      "cost_per_1m_in_cached": 0.11,
 460      "cost_per_1m_out_cached": 0,
 461      "context_window": 200000,
 462      "default_max_tokens": 8000,
 463      "can_reason": true,
 464      "reasoning_levels": [
 465        "low",
 466        "medium",
 467        "high"
 468      ],
 469      "default_reasoning_effort": "medium",
 470      "supports_attachments": false
 471    },
 472    {
 473      "id": "zai/glm-4.7",
 474      "name": "GLM 4.7",
 475      "cost_per_1m_in": 2.25,
 476      "cost_per_1m_out": 2.75,
 477      "cost_per_1m_in_cached": 2.25,
 478      "cost_per_1m_out_cached": 0,
 479      "context_window": 131000,
 480      "default_max_tokens": 8000,
 481      "can_reason": true,
 482      "reasoning_levels": [
 483        "low",
 484        "medium",
 485        "high"
 486      ],
 487      "default_reasoning_effort": "medium",
 488      "supports_attachments": false
 489    },
 490    {
 491      "id": "zai/glm-4.7-flash",
 492      "name": "GLM 4.7 Flash",
 493      "cost_per_1m_in": 0.07,
 494      "cost_per_1m_out": 0.4,
 495      "cost_per_1m_in_cached": 0,
 496      "cost_per_1m_out_cached": 0,
 497      "context_window": 200000,
 498      "default_max_tokens": 8000,
 499      "can_reason": true,
 500      "reasoning_levels": [
 501        "low",
 502        "medium",
 503        "high"
 504      ],
 505      "default_reasoning_effort": "medium",
 506      "supports_attachments": false
 507    },
 508    {
 509      "id": "zai/glm-4.7-flashx",
 510      "name": "GLM 4.7 FlashX",
 511      "cost_per_1m_in": 0.06,
 512      "cost_per_1m_out": 0.4,
 513      "cost_per_1m_in_cached": 0.01,
 514      "cost_per_1m_out_cached": 0,
 515      "context_window": 200000,
 516      "default_max_tokens": 8000,
 517      "can_reason": true,
 518      "reasoning_levels": [
 519        "low",
 520        "medium",
 521        "high"
 522      ],
 523      "default_reasoning_effort": "medium",
 524      "supports_attachments": false
 525    },
 526    {
 527      "id": "zai/glm-5",
 528      "name": "GLM 5",
 529      "cost_per_1m_in": 1,
 530      "cost_per_1m_out": 3.2,
 531      "cost_per_1m_in_cached": 0.2,
 532      "cost_per_1m_out_cached": 0,
 533      "context_window": 202800,
 534      "default_max_tokens": 8000,
 535      "can_reason": true,
 536      "reasoning_levels": [
 537        "low",
 538        "medium",
 539        "high"
 540      ],
 541      "default_reasoning_effort": "medium",
 542      "supports_attachments": false
 543    },
 544    {
 545      "id": "zai/glm-5-turbo",
 546      "name": "GLM 5 Turbo",
 547      "cost_per_1m_in": 1.2,
 548      "cost_per_1m_out": 4,
 549      "cost_per_1m_in_cached": 0.24,
 550      "cost_per_1m_out_cached": 0,
 551      "context_window": 202800,
 552      "default_max_tokens": 8000,
 553      "can_reason": true,
 554      "reasoning_levels": [
 555        "low",
 556        "medium",
 557        "high"
 558      ],
 559      "default_reasoning_effort": "medium",
 560      "supports_attachments": false
 561    },
 562    {
 563      "id": "zai/glm-5.1",
 564      "name": "GLM 5.1",
 565      "cost_per_1m_in": 1.4,
 566      "cost_per_1m_out": 4.4,
 567      "cost_per_1m_in_cached": 0.26,
 568      "cost_per_1m_out_cached": 0,
 569      "context_window": 202800,
 570      "default_max_tokens": 8000,
 571      "can_reason": true,
 572      "reasoning_levels": [
 573        "low",
 574        "medium",
 575        "high"
 576      ],
 577      "default_reasoning_effort": "medium",
 578      "supports_attachments": false
 579    },
 580    {
 581      "id": "zai/glm-5v-turbo",
 582      "name": "GLM 5V Turbo",
 583      "cost_per_1m_in": 1.2,
 584      "cost_per_1m_out": 4,
 585      "cost_per_1m_in_cached": 0.24,
 586      "cost_per_1m_out_cached": 0,
 587      "context_window": 200000,
 588      "default_max_tokens": 8000,
 589      "can_reason": true,
 590      "reasoning_levels": [
 591        "low",
 592        "medium",
 593        "high"
 594      ],
 595      "default_reasoning_effort": "medium",
 596      "supports_attachments": true
 597    },
 598    {
 599      "id": "zai/glm-4.5",
 600      "name": "GLM-4.5",
 601      "cost_per_1m_in": 0.6,
 602      "cost_per_1m_out": 2.2,
 603      "cost_per_1m_in_cached": 0.11,
 604      "cost_per_1m_out_cached": 0,
 605      "context_window": 128000,
 606      "default_max_tokens": 8000,
 607      "can_reason": true,
 608      "reasoning_levels": [
 609        "low",
 610        "medium",
 611        "high"
 612      ],
 613      "default_reasoning_effort": "medium",
 614      "supports_attachments": false
 615    },
 616    {
 617      "id": "zai/glm-4.6v",
 618      "name": "GLM-4.6V",
 619      "cost_per_1m_in": 0.3,
 620      "cost_per_1m_out": 0.9,
 621      "cost_per_1m_in_cached": 0.05,
 622      "cost_per_1m_out_cached": 0,
 623      "context_window": 128000,
 624      "default_max_tokens": 8000,
 625      "can_reason": true,
 626      "reasoning_levels": [
 627        "low",
 628        "medium",
 629        "high"
 630      ],
 631      "default_reasoning_effort": "medium",
 632      "supports_attachments": true
 633    },
 634    {
 635      "id": "zai/glm-4.6v-flash",
 636      "name": "GLM-4.6V-Flash",
 637      "cost_per_1m_in": 0,
 638      "cost_per_1m_out": 0,
 639      "cost_per_1m_in_cached": 0,
 640      "cost_per_1m_out_cached": 0,
 641      "context_window": 128000,
 642      "default_max_tokens": 8000,
 643      "can_reason": true,
 644      "reasoning_levels": [
 645        "low",
 646        "medium",
 647        "high"
 648      ],
 649      "default_reasoning_effort": "medium",
 650      "supports_attachments": true
 651    },
 652    {
 653      "id": "openai/gpt-5-chat",
 654      "name": "GPT 5 Chat",
 655      "cost_per_1m_in": 1.25,
 656      "cost_per_1m_out": 10,
 657      "cost_per_1m_in_cached": 0.125,
 658      "cost_per_1m_out_cached": 0,
 659      "context_window": 128000,
 660      "default_max_tokens": 8000,
 661      "can_reason": true,
 662      "reasoning_levels": [
 663        "low",
 664        "medium",
 665        "high"
 666      ],
 667      "default_reasoning_effort": "medium",
 668      "supports_attachments": true
 669    },
 670    {
 671      "id": "openai/gpt-5.1-codex-max",
 672      "name": "GPT 5.1 Codex Max",
 673      "cost_per_1m_in": 1.25,
 674      "cost_per_1m_out": 10,
 675      "cost_per_1m_in_cached": 0.125,
 676      "cost_per_1m_out_cached": 0,
 677      "context_window": 400000,
 678      "default_max_tokens": 8000,
 679      "can_reason": true,
 680      "reasoning_levels": [
 681        "low",
 682        "medium",
 683        "high"
 684      ],
 685      "default_reasoning_effort": "medium",
 686      "supports_attachments": true
 687    },
 688    {
 689      "id": "openai/gpt-5.1-codex-mini",
 690      "name": "GPT 5.1 Codex Mini",
 691      "cost_per_1m_in": 0.25,
 692      "cost_per_1m_out": 2,
 693      "cost_per_1m_in_cached": 0.025,
 694      "cost_per_1m_out_cached": 0,
 695      "context_window": 400000,
 696      "default_max_tokens": 8000,
 697      "can_reason": true,
 698      "reasoning_levels": [
 699        "low",
 700        "medium",
 701        "high"
 702      ],
 703      "default_reasoning_effort": "medium",
 704      "supports_attachments": true
 705    },
 706    {
 707      "id": "openai/gpt-5.1-thinking",
 708      "name": "GPT 5.1 Thinking",
 709      "cost_per_1m_in": 1.25,
 710      "cost_per_1m_out": 10,
 711      "cost_per_1m_in_cached": 0.125,
 712      "cost_per_1m_out_cached": 0,
 713      "context_window": 400000,
 714      "default_max_tokens": 8000,
 715      "can_reason": true,
 716      "reasoning_levels": [
 717        "low",
 718        "medium",
 719        "high"
 720      ],
 721      "default_reasoning_effort": "medium",
 722      "supports_attachments": true
 723    },
 724    {
 725      "id": "openai/gpt-5.2",
 726      "name": "GPT 5.2",
 727      "cost_per_1m_in": 1.75,
 728      "cost_per_1m_out": 14,
 729      "cost_per_1m_in_cached": 0.175,
 730      "cost_per_1m_out_cached": 0,
 731      "context_window": 400000,
 732      "default_max_tokens": 8000,
 733      "can_reason": true,
 734      "reasoning_levels": [
 735        "low",
 736        "medium",
 737        "high"
 738      ],
 739      "default_reasoning_effort": "medium",
 740      "supports_attachments": true
 741    },
 742    {
 743      "id": "openai/gpt-5.2-pro",
  744      "name": "GPT 5.2 Pro",
 745      "cost_per_1m_in": 21,
 746      "cost_per_1m_out": 168,
 747      "cost_per_1m_in_cached": 0,
 748      "cost_per_1m_out_cached": 0,
 749      "context_window": 400000,
 750      "default_max_tokens": 8000,
 751      "can_reason": true,
 752      "reasoning_levels": [
 753        "low",
 754        "medium",
 755        "high"
 756      ],
 757      "default_reasoning_effort": "medium",
 758      "supports_attachments": true
 759    },
 760    {
 761      "id": "openai/gpt-5.2-chat",
 762      "name": "GPT 5.2 Chat",
 763      "cost_per_1m_in": 1.75,
 764      "cost_per_1m_out": 14,
 765      "cost_per_1m_in_cached": 0.175,
 766      "cost_per_1m_out_cached": 0,
 767      "context_window": 128000,
 768      "default_max_tokens": 8000,
 769      "can_reason": true,
 770      "reasoning_levels": [
 771        "low",
 772        "medium",
 773        "high"
 774      ],
 775      "default_reasoning_effort": "medium",
 776      "supports_attachments": true
 777    },
 778    {
 779      "id": "openai/gpt-5.2-codex",
 780      "name": "GPT 5.2 Codex",
 781      "cost_per_1m_in": 1.75,
 782      "cost_per_1m_out": 14,
 783      "cost_per_1m_in_cached": 0.175,
 784      "cost_per_1m_out_cached": 0,
 785      "context_window": 400000,
 786      "default_max_tokens": 8000,
 787      "can_reason": true,
 788      "reasoning_levels": [
 789        "low",
 790        "medium",
 791        "high"
 792      ],
 793      "default_reasoning_effort": "medium",
 794      "supports_attachments": true
 795    },
 796    {
 797      "id": "openai/gpt-5.3-codex",
 798      "name": "GPT 5.3 Codex",
 799      "cost_per_1m_in": 1.75,
 800      "cost_per_1m_out": 14,
 801      "cost_per_1m_in_cached": 0.175,
 802      "cost_per_1m_out_cached": 0,
 803      "context_window": 400000,
 804      "default_max_tokens": 8000,
 805      "can_reason": true,
 806      "reasoning_levels": [
 807        "low",
 808        "medium",
 809        "high"
 810      ],
 811      "default_reasoning_effort": "medium",
 812      "supports_attachments": true
 813    },
 814    {
 815      "id": "openai/gpt-5.4",
 816      "name": "GPT 5.4",
 817      "cost_per_1m_in": 2.5,
 818      "cost_per_1m_out": 15,
 819      "cost_per_1m_in_cached": 0.25,
 820      "cost_per_1m_out_cached": 0,
 821      "context_window": 1050000,
 822      "default_max_tokens": 8000,
 823      "can_reason": true,
 824      "reasoning_levels": [
 825        "low",
 826        "medium",
 827        "high"
 828      ],
 829      "default_reasoning_effort": "medium",
 830      "supports_attachments": true
 831    },
 832    {
 833      "id": "openai/gpt-5.4-mini",
 834      "name": "GPT 5.4 Mini",
 835      "cost_per_1m_in": 0.75,
 836      "cost_per_1m_out": 4.5,
 837      "cost_per_1m_in_cached": 0.075,
 838      "cost_per_1m_out_cached": 0,
 839      "context_window": 400000,
 840      "default_max_tokens": 8000,
 841      "can_reason": true,
 842      "reasoning_levels": [
 843        "low",
 844        "medium",
 845        "high"
 846      ],
 847      "default_reasoning_effort": "medium",
 848      "supports_attachments": true
 849    },
 850    {
 851      "id": "openai/gpt-5.4-nano",
 852      "name": "GPT 5.4 Nano",
 853      "cost_per_1m_in": 0.2,
 854      "cost_per_1m_out": 1.25,
 855      "cost_per_1m_in_cached": 0.02,
 856      "cost_per_1m_out_cached": 0,
 857      "context_window": 400000,
 858      "default_max_tokens": 8000,
 859      "can_reason": true,
 860      "reasoning_levels": [
 861        "low",
 862        "medium",
 863        "high"
 864      ],
 865      "default_reasoning_effort": "medium",
 866      "supports_attachments": true
 867    },
 868    {
 869      "id": "openai/gpt-5.4-pro",
 870      "name": "GPT 5.4 Pro",
 871      "cost_per_1m_in": 30,
 872      "cost_per_1m_out": 180,
 873      "cost_per_1m_in_cached": 0,
 874      "cost_per_1m_out_cached": 0,
 875      "context_window": 1050000,
 876      "default_max_tokens": 8000,
 877      "can_reason": true,
 878      "reasoning_levels": [
 879        "low",
 880        "medium",
 881        "high"
 882      ],
 883      "default_reasoning_effort": "medium",
 884      "supports_attachments": true
 885    },
 886    {
 887      "id": "openai/gpt-5.5",
 888      "name": "GPT 5.5",
 889      "cost_per_1m_in": 5,
 890      "cost_per_1m_out": 30,
 891      "cost_per_1m_in_cached": 0.5,
 892      "cost_per_1m_out_cached": 0,
 893      "context_window": 1000000,
 894      "default_max_tokens": 8000,
 895      "can_reason": true,
 896      "reasoning_levels": [
 897        "low",
 898        "medium",
 899        "high"
 900      ],
 901      "default_reasoning_effort": "medium",
 902      "supports_attachments": true
 903    },
 904    {
 905      "id": "openai/gpt-5.5-pro",
 906      "name": "GPT 5.5 Pro",
 907      "cost_per_1m_in": 30,
 908      "cost_per_1m_out": 180,
 909      "cost_per_1m_in_cached": 0,
 910      "cost_per_1m_out_cached": 0,
 911      "context_window": 1000000,
 912      "default_max_tokens": 8000,
 913      "can_reason": true,
 914      "reasoning_levels": [
 915        "low",
 916        "medium",
 917        "high"
 918      ],
 919      "default_reasoning_effort": "medium",
 920      "supports_attachments": true
 921    },
 922    {
 923      "id": "openai/gpt-oss-20b",
  924      "name": "GPT OSS 20B",
 925      "cost_per_1m_in": 0.05,
 926      "cost_per_1m_out": 0.2,
 927      "cost_per_1m_in_cached": 0,
 928      "cost_per_1m_out_cached": 0,
 929      "context_window": 131072,
 930      "default_max_tokens": 8000,
 931      "can_reason": true,
 932      "reasoning_levels": [
 933        "low",
 934        "medium",
 935        "high"
 936      ],
 937      "default_reasoning_effort": "medium",
 938      "supports_attachments": false
 939    },
 940    {
 941      "id": "openai/gpt-oss-safeguard-20b",
 942      "name": "GPT OSS Safeguard 20B",
 943      "cost_per_1m_in": 0.075,
 944      "cost_per_1m_out": 0.3,
 945      "cost_per_1m_in_cached": 0.037,
 946      "cost_per_1m_out_cached": 0,
 947      "context_window": 131072,
 948      "default_max_tokens": 8000,
 949      "can_reason": true,
 950      "reasoning_levels": [
 951        "low",
 952        "medium",
 953        "high"
 954      ],
 955      "default_reasoning_effort": "medium",
 956      "supports_attachments": false
 957    },
 958    {
 959      "id": "openai/gpt-4-turbo",
 960      "name": "GPT-4 Turbo",
 961      "cost_per_1m_in": 10,
 962      "cost_per_1m_out": 30,
 963      "cost_per_1m_in_cached": 0,
 964      "cost_per_1m_out_cached": 0,
 965      "context_window": 128000,
 966      "default_max_tokens": 4096,
 967      "can_reason": false,
 968      "supports_attachments": true
 969    },
 970    {
 971      "id": "openai/gpt-4.1",
 972      "name": "GPT-4.1",
 973      "cost_per_1m_in": 2,
 974      "cost_per_1m_out": 8,
 975      "cost_per_1m_in_cached": 0.5,
 976      "cost_per_1m_out_cached": 0,
 977      "context_window": 1047576,
 978      "default_max_tokens": 8000,
 979      "can_reason": false,
 980      "supports_attachments": true
 981    },
 982    {
 983      "id": "openai/gpt-4.1-mini",
 984      "name": "GPT-4.1 mini",
 985      "cost_per_1m_in": 0.4,
 986      "cost_per_1m_out": 1.6,
 987      "cost_per_1m_in_cached": 0.1,
 988      "cost_per_1m_out_cached": 0,
 989      "context_window": 1047576,
 990      "default_max_tokens": 8000,
 991      "can_reason": false,
 992      "supports_attachments": true
 993    },
 994    {
 995      "id": "openai/gpt-4.1-nano",
 996      "name": "GPT-4.1 nano",
 997      "cost_per_1m_in": 0.1,
 998      "cost_per_1m_out": 0.4,
 999      "cost_per_1m_in_cached": 0.025,
1000      "cost_per_1m_out_cached": 0,
1001      "context_window": 1047576,
1002      "default_max_tokens": 8000,
1003      "can_reason": false,
1004      "supports_attachments": true
1005    },
1006    {
1007      "id": "openai/gpt-4o",
1008      "name": "GPT-4o",
1009      "cost_per_1m_in": 2.5,
1010      "cost_per_1m_out": 10,
1011      "cost_per_1m_in_cached": 1.25,
1012      "cost_per_1m_out_cached": 0,
1013      "context_window": 128000,
1014      "default_max_tokens": 8000,
1015      "can_reason": false,
1016      "supports_attachments": true
1017    },
1018    {
1019      "id": "openai/gpt-4o-mini",
1020      "name": "GPT-4o mini",
1021      "cost_per_1m_in": 0.15,
1022      "cost_per_1m_out": 0.6,
1023      "cost_per_1m_in_cached": 0.075,
1024      "cost_per_1m_out_cached": 0,
1025      "context_window": 128000,
1026      "default_max_tokens": 8000,
1027      "can_reason": false,
1028      "supports_attachments": true
1029    },
1030    {
1031      "id": "openai/gpt-5",
1032      "name": "GPT-5",
1033      "cost_per_1m_in": 1.25,
1034      "cost_per_1m_out": 10,
1035      "cost_per_1m_in_cached": 0.125,
1036      "cost_per_1m_out_cached": 0,
1037      "context_window": 400000,
1038      "default_max_tokens": 8000,
1039      "can_reason": true,
1040      "reasoning_levels": [
1041        "low",
1042        "medium",
1043        "high"
1044      ],
1045      "default_reasoning_effort": "medium",
1046      "supports_attachments": true
1047    },
1048    {
1049      "id": "openai/gpt-5-mini",
1050      "name": "GPT-5 mini",
1051      "cost_per_1m_in": 0.25,
1052      "cost_per_1m_out": 2,
1053      "cost_per_1m_in_cached": 0.025,
1054      "cost_per_1m_out_cached": 0,
1055      "context_window": 400000,
1056      "default_max_tokens": 8000,
1057      "can_reason": true,
1058      "reasoning_levels": [
1059        "low",
1060        "medium",
1061        "high"
1062      ],
1063      "default_reasoning_effort": "medium",
1064      "supports_attachments": true
1065    },
1066    {
1067      "id": "openai/gpt-5-nano",
1068      "name": "GPT-5 nano",
1069      "cost_per_1m_in": 0.05,
1070      "cost_per_1m_out": 0.4,
1071      "cost_per_1m_in_cached": 0.005,
1072      "cost_per_1m_out_cached": 0,
1073      "context_window": 400000,
1074      "default_max_tokens": 8000,
1075      "can_reason": true,
1076      "reasoning_levels": [
1077        "low",
1078        "medium",
1079        "high"
1080      ],
1081      "default_reasoning_effort": "medium",
1082      "supports_attachments": true
1083    },
1084    {
1085      "id": "openai/gpt-5-pro",
1086      "name": "GPT-5 pro",
1087      "cost_per_1m_in": 15,
1088      "cost_per_1m_out": 120,
1089      "cost_per_1m_in_cached": 0,
1090      "cost_per_1m_out_cached": 0,
1091      "context_window": 400000,
1092      "default_max_tokens": 8000,
1093      "can_reason": true,
1094      "reasoning_levels": [
1095        "low",
1096        "medium",
1097        "high"
1098      ],
1099      "default_reasoning_effort": "medium",
1100      "supports_attachments": true
1101    },
1102    {
1103      "id": "openai/gpt-5-codex",
1104      "name": "GPT-5-Codex",
1105      "cost_per_1m_in": 1.25,
1106      "cost_per_1m_out": 10,
1107      "cost_per_1m_in_cached": 0.125,
1108      "cost_per_1m_out_cached": 0,
1109      "context_window": 400000,
1110      "default_max_tokens": 8000,
1111      "can_reason": true,
1112      "reasoning_levels": [
1113        "low",
1114        "medium",
1115        "high"
1116      ],
1117      "default_reasoning_effort": "medium",
1118      "supports_attachments": false
1119    },
1120    {
1121      "id": "openai/gpt-5.1-instant",
1122      "name": "GPT-5.1 Instant",
1123      "cost_per_1m_in": 1.25,
1124      "cost_per_1m_out": 10,
1125      "cost_per_1m_in_cached": 0.125,
1126      "cost_per_1m_out_cached": 0,
1127      "context_window": 128000,
1128      "default_max_tokens": 8000,
1129      "can_reason": true,
1130      "reasoning_levels": [
1131        "low",
1132        "medium",
1133        "high"
1134      ],
1135      "default_reasoning_effort": "medium",
1136      "supports_attachments": true
1137    },
1138    {
1139      "id": "openai/gpt-5.1-codex",
1140      "name": "GPT-5.1-Codex",
1141      "cost_per_1m_in": 1.25,
1142      "cost_per_1m_out": 10,
1143      "cost_per_1m_in_cached": 0.125,
1144      "cost_per_1m_out_cached": 0,
1145      "context_window": 400000,
1146      "default_max_tokens": 8000,
1147      "can_reason": true,
1148      "reasoning_levels": [
1149        "low",
1150        "medium",
1151        "high"
1152      ],
1153      "default_reasoning_effort": "medium",
1154      "supports_attachments": true
1155    },
1156    {
1157      "id": "openai/gpt-5.3-chat",
1158      "name": "GPT-5.3 Chat",
1159      "cost_per_1m_in": 1.75,
1160      "cost_per_1m_out": 14,
1161      "cost_per_1m_in_cached": 0.175,
1162      "cost_per_1m_out_cached": 0,
1163      "context_window": 128000,
1164      "default_max_tokens": 8000,
1165      "can_reason": true,
1166      "reasoning_levels": [
1167        "low",
1168        "medium",
1169        "high"
1170      ],
1171      "default_reasoning_effort": "medium",
1172      "supports_attachments": true
1173    },
1174    {
1175      "id": "google/gemini-2.0-flash",
1176      "name": "Gemini 2.0 Flash",
1177      "cost_per_1m_in": 0.15,
1178      "cost_per_1m_out": 0.6,
1179      "cost_per_1m_in_cached": 0.025,
1180      "cost_per_1m_out_cached": 0,
1181      "context_window": 1048576,
1182      "default_max_tokens": 8000,
1183      "can_reason": false,
1184      "supports_attachments": true
1185    },
1186    {
1187      "id": "google/gemini-2.0-flash-lite",
1188      "name": "Gemini 2.0 Flash Lite",
1189      "cost_per_1m_in": 0.075,
1190      "cost_per_1m_out": 0.3,
1191      "cost_per_1m_in_cached": 0.02,
1192      "cost_per_1m_out_cached": 0,
1193      "context_window": 1048576,
1194      "default_max_tokens": 8000,
1195      "can_reason": false,
1196      "supports_attachments": true
1197    },
1198    {
1199      "id": "google/gemini-2.5-flash",
1200      "name": "Gemini 2.5 Flash",
1201      "cost_per_1m_in": 0.3,
1202      "cost_per_1m_out": 2.5,
1203      "cost_per_1m_in_cached": 0.03,
1204      "cost_per_1m_out_cached": 0,
1205      "context_window": 1000000,
1206      "default_max_tokens": 8000,
1207      "can_reason": true,
1208      "reasoning_levels": [
1209        "low",
1210        "medium",
1211        "high"
1212      ],
1213      "default_reasoning_effort": "medium",
1214      "supports_attachments": true
1215    },
1216    {
1217      "id": "google/gemini-2.5-flash-lite",
1218      "name": "Gemini 2.5 Flash Lite",
1219      "cost_per_1m_in": 0.1,
1220      "cost_per_1m_out": 0.4,
1221      "cost_per_1m_in_cached": 0.01,
1222      "cost_per_1m_out_cached": 0,
1223      "context_window": 1048576,
1224      "default_max_tokens": 8000,
1225      "can_reason": true,
1226      "reasoning_levels": [
1227        "low",
1228        "medium",
1229        "high"
1230      ],
1231      "default_reasoning_effort": "medium",
1232      "supports_attachments": true
1233    },
1234    {
1235      "id": "google/gemini-2.5-pro",
1236      "name": "Gemini 2.5 Pro",
1237      "cost_per_1m_in": 1.25,
1238      "cost_per_1m_out": 10,
1239      "cost_per_1m_in_cached": 0.125,
1240      "cost_per_1m_out_cached": 0,
1241      "context_window": 1048576,
1242      "default_max_tokens": 8000,
1243      "can_reason": true,
1244      "reasoning_levels": [
1245        "low",
1246        "medium",
1247        "high"
1248      ],
1249      "default_reasoning_effort": "medium",
1250      "supports_attachments": true
1251    },
1252    {
1253      "id": "google/gemini-3-flash",
1254      "name": "Gemini 3 Flash",
1255      "cost_per_1m_in": 0.5,
1256      "cost_per_1m_out": 3,
1257      "cost_per_1m_in_cached": 0.05,
1258      "cost_per_1m_out_cached": 0,
1259      "context_window": 1000000,
1260      "default_max_tokens": 8000,
1261      "can_reason": true,
1262      "reasoning_levels": [
1263        "low",
1264        "medium",
1265        "high"
1266      ],
1267      "default_reasoning_effort": "medium",
1268      "supports_attachments": true
1269    },
1270    {
1271      "id": "google/gemini-3-pro-preview",
1272      "name": "Gemini 3 Pro Preview",
1273      "cost_per_1m_in": 2,
1274      "cost_per_1m_out": 12,
1275      "cost_per_1m_in_cached": 0.2,
1276      "cost_per_1m_out_cached": 0,
1277      "context_window": 1000000,
1278      "default_max_tokens": 8000,
1279      "can_reason": true,
1280      "reasoning_levels": [
1281        "low",
1282        "medium",
1283        "high"
1284      ],
1285      "default_reasoning_effort": "medium",
1286      "supports_attachments": true
1287    },
1288    {
1289      "id": "google/gemini-3.1-flash-lite-preview",
1290      "name": "Gemini 3.1 Flash Lite Preview",
1291      "cost_per_1m_in": 0.25,
1292      "cost_per_1m_out": 1.5,
1293      "cost_per_1m_in_cached": 0.03,
1294      "cost_per_1m_out_cached": 0,
1295      "context_window": 1000000,
1296      "default_max_tokens": 8000,
1297      "can_reason": true,
1298      "reasoning_levels": [
1299        "low",
1300        "medium",
1301        "high"
1302      ],
1303      "default_reasoning_effort": "medium",
1304      "supports_attachments": true
1305    },
1306    {
1307      "id": "google/gemini-3.1-pro-preview",
1308      "name": "Gemini 3.1 Pro Preview",
1309      "cost_per_1m_in": 2,
1310      "cost_per_1m_out": 12,
1311      "cost_per_1m_in_cached": 0.2,
1312      "cost_per_1m_out_cached": 0,
1313      "context_window": 1000000,
1314      "default_max_tokens": 8000,
1315      "can_reason": true,
1316      "reasoning_levels": [
1317        "low",
1318        "medium",
1319        "high"
1320      ],
1321      "default_reasoning_effort": "medium",
1322      "supports_attachments": true
1323    },
1324    {
1325      "id": "google/gemma-4-26b-a4b-it",
1326      "name": "Gemma 4 26B A4B IT",
1327      "cost_per_1m_in": 0.13,
1328      "cost_per_1m_out": 0.4,
1329      "cost_per_1m_in_cached": 0,
1330      "cost_per_1m_out_cached": 0,
1331      "context_window": 262144,
1332      "default_max_tokens": 8000,
1333      "can_reason": false,
1334      "supports_attachments": true
1335    },
1336    {
1337      "id": "google/gemma-4-31b-it",
1338      "name": "Gemma 4 31B IT",
1339      "cost_per_1m_in": 0.14,
1340      "cost_per_1m_out": 0.4,
1341      "cost_per_1m_in_cached": 0,
1342      "cost_per_1m_out_cached": 0,
1343      "context_window": 262144,
1344      "default_max_tokens": 8000,
1345      "can_reason": false,
1346      "supports_attachments": true
1347    },
1348    {
1349      "id": "xai/grok-3",
1350      "name": "Grok 3 Beta",
1351      "cost_per_1m_in": 3,
1352      "cost_per_1m_out": 15,
1353      "cost_per_1m_in_cached": 0.75,
1354      "cost_per_1m_out_cached": 0,
1355      "context_window": 131072,
1356      "default_max_tokens": 8000,
1357      "can_reason": false,
1358      "supports_attachments": false
1359    },
1360    {
1361      "id": "xai/grok-3-fast",
1362      "name": "Grok 3 Fast Beta",
1363      "cost_per_1m_in": 5,
1364      "cost_per_1m_out": 25,
1365      "cost_per_1m_in_cached": 1.25,
1366      "cost_per_1m_out_cached": 0,
1367      "context_window": 131072,
1368      "default_max_tokens": 8000,
1369      "can_reason": false,
1370      "supports_attachments": false
1371    },
1372    {
1373      "id": "xai/grok-3-mini",
1374      "name": "Grok 3 Mini Beta",
1375      "cost_per_1m_in": 0.3,
1376      "cost_per_1m_out": 0.5,
1377      "cost_per_1m_in_cached": 0.075,
1378      "cost_per_1m_out_cached": 0,
1379      "context_window": 131072,
1380      "default_max_tokens": 8000,
1381      "can_reason": false,
1382      "supports_attachments": false
1383    },
1384    {
1385      "id": "xai/grok-3-mini-fast",
1386      "name": "Grok 3 Mini Fast Beta",
1387      "cost_per_1m_in": 0.6,
1388      "cost_per_1m_out": 4,
1389      "cost_per_1m_in_cached": 0,
1390      "cost_per_1m_out_cached": 0,
1391      "context_window": 131072,
1392      "default_max_tokens": 8000,
1393      "can_reason": false,
1394      "supports_attachments": false
1395    },
1396    {
1397      "id": "xai/grok-4",
1398      "name": "Grok 4",
1399      "cost_per_1m_in": 3,
1400      "cost_per_1m_out": 15,
1401      "cost_per_1m_in_cached": 0.75,
1402      "cost_per_1m_out_cached": 0,
1403      "context_window": 256000,
1404      "default_max_tokens": 8000,
1405      "can_reason": true,
1406      "reasoning_levels": [
1407        "low",
1408        "medium",
1409        "high"
1410      ],
1411      "default_reasoning_effort": "medium",
1412      "supports_attachments": true
1413    },
1414    {
1415      "id": "xai/grok-4-fast-non-reasoning",
1416      "name": "Grok 4 Fast Non-Reasoning",
1417      "cost_per_1m_in": 0.2,
1418      "cost_per_1m_out": 0.5,
1419      "cost_per_1m_in_cached": 0.05,
1420      "cost_per_1m_out_cached": 0,
1421      "context_window": 2000000,
1422      "default_max_tokens": 8000,
1423      "can_reason": false,
1424      "supports_attachments": true
1425    },
1426    {
1427      "id": "xai/grok-4-fast-reasoning",
1428      "name": "Grok 4 Fast Reasoning",
1429      "cost_per_1m_in": 0.2,
1430      "cost_per_1m_out": 0.5,
1431      "cost_per_1m_in_cached": 0.05,
1432      "cost_per_1m_out_cached": 0,
1433      "context_window": 2000000,
1434      "default_max_tokens": 8000,
1435      "can_reason": true,
1436      "reasoning_levels": [
1437        "low",
1438        "medium",
1439        "high"
1440      ],
1441      "default_reasoning_effort": "medium",
1442      "supports_attachments": true
1443    },
1444    {
1445      "id": "xai/grok-4.1-fast-non-reasoning",
1446      "name": "Grok 4.1 Fast Non-Reasoning",
1447      "cost_per_1m_in": 0.2,
1448      "cost_per_1m_out": 0.5,
1449      "cost_per_1m_in_cached": 0.05,
1450      "cost_per_1m_out_cached": 0,
1451      "context_window": 2000000,
1452      "default_max_tokens": 8000,
1453      "can_reason": false,
1454      "supports_attachments": true
1455    },
1456    {
1457      "id": "xai/grok-4.1-fast-reasoning",
1458      "name": "Grok 4.1 Fast Reasoning",
1459      "cost_per_1m_in": 0.2,
1460      "cost_per_1m_out": 0.5,
1461      "cost_per_1m_in_cached": 0.05,
1462      "cost_per_1m_out_cached": 0,
1463      "context_window": 2000000,
1464      "default_max_tokens": 8000,
1465      "can_reason": true,
1466      "reasoning_levels": [
1467        "low",
1468        "medium",
1469        "high"
1470      ],
1471      "default_reasoning_effort": "medium",
1472      "supports_attachments": true
1473    },
1474    {
1475      "id": "xai/grok-4.20-non-reasoning-beta",
1476      "name": "Grok 4.20 Beta Non-Reasoning",
1477      "cost_per_1m_in": 2,
1478      "cost_per_1m_out": 6,
1479      "cost_per_1m_in_cached": 0.2,
1480      "cost_per_1m_out_cached": 0,
1481      "context_window": 2000000,
1482      "default_max_tokens": 8000,
1483      "can_reason": false,
1484      "supports_attachments": true
1485    },
1486    {
1487      "id": "xai/grok-4.20-reasoning-beta",
1488      "name": "Grok 4.20 Beta Reasoning",
1489      "cost_per_1m_in": 2,
1490      "cost_per_1m_out": 6,
1491      "cost_per_1m_in_cached": 0.2,
1492      "cost_per_1m_out_cached": 0,
1493      "context_window": 2000000,
1494      "default_max_tokens": 8000,
1495      "can_reason": true,
1496      "reasoning_levels": [
1497        "low",
1498        "medium",
1499        "high"
1500      ],
1501      "default_reasoning_effort": "medium",
1502      "supports_attachments": true
1503    },
1504    {
1505      "id": "xai/grok-4.20-multi-agent-beta",
1506      "name": "Grok 4.20 Multi Agent Beta",
1507      "cost_per_1m_in": 2,
1508      "cost_per_1m_out": 6,
1509      "cost_per_1m_in_cached": 0.2,
1510      "cost_per_1m_out_cached": 0,
1511      "context_window": 2000000,
1512      "default_max_tokens": 8000,
1513      "can_reason": true,
1514      "reasoning_levels": [
1515        "low",
1516        "medium",
1517        "high"
1518      ],
1519      "default_reasoning_effort": "medium",
1520      "supports_attachments": true
1521    },
1522    {
1523      "id": "xai/grok-4.20-multi-agent",
1524      "name": "Grok 4.20 Multi-Agent",
1525      "cost_per_1m_in": 2,
1526      "cost_per_1m_out": 6,
1527      "cost_per_1m_in_cached": 0.2,
1528      "cost_per_1m_out_cached": 0,
1529      "context_window": 2000000,
1530      "default_max_tokens": 8000,
1531      "can_reason": true,
1532      "reasoning_levels": [
1533        "low",
1534        "medium",
1535        "high"
1536      ],
1537      "default_reasoning_effort": "medium",
1538      "supports_attachments": true
1539    },
1540    {
1541      "id": "xai/grok-4.20-non-reasoning",
1542      "name": "Grok 4.20 Non-Reasoning",
1543      "cost_per_1m_in": 2,
1544      "cost_per_1m_out": 6,
1545      "cost_per_1m_in_cached": 0.2,
1546      "cost_per_1m_out_cached": 0,
1547      "context_window": 2000000,
1548      "default_max_tokens": 8000,
1549      "can_reason": false,
1550      "supports_attachments": true
1551    },
1552    {
1553      "id": "xai/grok-4.20-reasoning",
1554      "name": "Grok 4.20 Reasoning",
1555      "cost_per_1m_in": 2,
1556      "cost_per_1m_out": 6,
1557      "cost_per_1m_in_cached": 0.2,
1558      "cost_per_1m_out_cached": 0,
1559      "context_window": 2000000,
1560      "default_max_tokens": 8000,
1561      "can_reason": true,
1562      "reasoning_levels": [
1563        "low",
1564        "medium",
1565        "high"
1566      ],
1567      "default_reasoning_effort": "medium",
1568      "supports_attachments": true
1569    },
1570    {
1571      "id": "xai/grok-code-fast-1",
1572      "name": "Grok Code Fast 1",
1573      "cost_per_1m_in": 0.2,
1574      "cost_per_1m_out": 1.5,
1575      "cost_per_1m_in_cached": 0.02,
1576      "cost_per_1m_out_cached": 0,
1577      "context_window": 256000,
1578      "default_max_tokens": 8000,
1579      "can_reason": true,
1580      "reasoning_levels": [
1581        "low",
1582        "medium",
1583        "high"
1584      ],
1585      "default_reasoning_effort": "medium",
1586      "supports_attachments": false
1587    },
1588    {
1589      "id": "prime-intellect/intellect-3",
1590      "name": "INTELLECT 3",
1591      "cost_per_1m_in": 0.2,
1592      "cost_per_1m_out": 1.1,
1593      "cost_per_1m_in_cached": 0,
1594      "cost_per_1m_out_cached": 0,
1595      "context_window": 131072,
1596      "default_max_tokens": 8000,
1597      "can_reason": true,
1598      "reasoning_levels": [
1599        "low",
1600        "medium",
1601        "high"
1602      ],
1603      "default_reasoning_effort": "medium",
1604      "supports_attachments": false
1605    },
1606    {
1607      "id": "kwaipilot/kat-coder-pro-v2",
1608      "name": "Kat Coder Pro V2",
1609      "cost_per_1m_in": 0.3,
1610      "cost_per_1m_out": 1.2,
1611      "cost_per_1m_in_cached": 0.06,
1612      "cost_per_1m_out_cached": 0,
1613      "context_window": 256000,
1614      "default_max_tokens": 8000,
1615      "can_reason": true,
1616      "reasoning_levels": [
1617        "low",
1618        "medium",
1619        "high"
1620      ],
1621      "default_reasoning_effort": "medium",
1622      "supports_attachments": false
1623    },
1624    {
1625      "id": "moonshotai/kimi-k2-0905",
1626      "name": "Kimi K2 0905",
1627      "cost_per_1m_in": 0.6,
1628      "cost_per_1m_out": 2.5,
1629      "cost_per_1m_in_cached": 0.3,
1630      "cost_per_1m_out_cached": 0,
1631      "context_window": 256000,
1632      "default_max_tokens": 8000,
1633      "can_reason": false,
1634      "supports_attachments": false
1635    },
1636    {
1637      "id": "moonshotai/kimi-k2",
1638      "name": "Kimi K2 Instruct",
1639      "cost_per_1m_in": 0.57,
1640      "cost_per_1m_out": 2.3,
1641      "cost_per_1m_in_cached": 0,
1642      "cost_per_1m_out_cached": 0,
1643      "context_window": 131072,
1644      "default_max_tokens": 8000,
1645      "can_reason": false,
1646      "supports_attachments": false
1647    },
1648    {
1649      "id": "moonshotai/kimi-k2-thinking",
1650      "name": "Kimi K2 Thinking",
1651      "cost_per_1m_in": 0.6,
1652      "cost_per_1m_out": 2.5,
1653      "cost_per_1m_in_cached": 0.15,
1654      "cost_per_1m_out_cached": 0,
1655      "context_window": 262114,
1656      "default_max_tokens": 8000,
1657      "can_reason": true,
1658      "reasoning_levels": [
1659        "low",
1660        "medium",
1661        "high"
1662      ],
1663      "default_reasoning_effort": "medium",
1664      "supports_attachments": false
1665    },
1666    {
1667      "id": "moonshotai/kimi-k2-thinking-turbo",
1668      "name": "Kimi K2 Thinking Turbo",
1669      "cost_per_1m_in": 1.15,
1670      "cost_per_1m_out": 8,
1671      "cost_per_1m_in_cached": 0.15,
1672      "cost_per_1m_out_cached": 0,
1673      "context_window": 262114,
1674      "default_max_tokens": 8000,
1675      "can_reason": true,
1676      "reasoning_levels": [
1677        "low",
1678        "medium",
1679        "high"
1680      ],
1681      "default_reasoning_effort": "medium",
1682      "supports_attachments": false
1683    },
1684    {
1685      "id": "moonshotai/kimi-k2-turbo",
1686      "name": "Kimi K2 Turbo",
1687      "cost_per_1m_in": 1.15,
1688      "cost_per_1m_out": 8,
1689      "cost_per_1m_in_cached": 0.15,
1690      "cost_per_1m_out_cached": 0,
1691      "context_window": 256000,
1692      "default_max_tokens": 8000,
1693      "can_reason": false,
1694      "supports_attachments": false
1695    },
1696    {
1697      "id": "moonshotai/kimi-k2.5",
1698      "name": "Kimi K2.5",
1699      "cost_per_1m_in": 0.6,
1700      "cost_per_1m_out": 3,
1701      "cost_per_1m_in_cached": 0.1,
1702      "cost_per_1m_out_cached": 0,
1703      "context_window": 262114,
1704      "default_max_tokens": 8000,
1705      "can_reason": true,
1706      "reasoning_levels": [
1707        "low",
1708        "medium",
1709        "high"
1710      ],
1711      "default_reasoning_effort": "medium",
1712      "supports_attachments": true
1713    },
1714    {
1715      "id": "moonshotai/kimi-k2.6",
1716      "name": "Kimi K2.6",
1717      "cost_per_1m_in": 0.95,
1718      "cost_per_1m_out": 4,
1719      "cost_per_1m_in_cached": 0.16,
1720      "cost_per_1m_out_cached": 0,
1721      "context_window": 262000,
1722      "default_max_tokens": 8000,
1723      "can_reason": true,
1724      "reasoning_levels": [
1725        "low",
1726        "medium",
1727        "high"
1728      ],
1729      "default_reasoning_effort": "medium",
1730      "supports_attachments": true
1731    },
1732    {
1733      "id": "meta/llama-3.1-70b",
1734      "name": "Llama 3.1 70B Instruct",
1735      "cost_per_1m_in": 0.72,
1736      "cost_per_1m_out": 0.72,
1737      "cost_per_1m_in_cached": 0,
1738      "cost_per_1m_out_cached": 0,
1739      "context_window": 128000,
1740      "default_max_tokens": 8000,
1741      "can_reason": false,
1742      "supports_attachments": false
1743    },
1744    {
1745      "id": "meta/llama-3.1-8b",
1746      "name": "Llama 3.1 8B Instruct",
1747      "cost_per_1m_in": 0.22,
1748      "cost_per_1m_out": 0.22,
1749      "cost_per_1m_in_cached": 0,
1750      "cost_per_1m_out_cached": 0,
1751      "context_window": 128000,
1752      "default_max_tokens": 8000,
1753      "can_reason": false,
1754      "supports_attachments": false
1755    },
1756    {
1757      "id": "meta/llama-3.2-11b",
1758      "name": "Llama 3.2 11B Vision Instruct",
1759      "cost_per_1m_in": 0.16,
1760      "cost_per_1m_out": 0.16,
1761      "cost_per_1m_in_cached": 0,
1762      "cost_per_1m_out_cached": 0,
1763      "context_window": 128000,
1764      "default_max_tokens": 8000,
1765      "can_reason": false,
1766      "supports_attachments": true
1767    },
1768    {
1769      "id": "meta/llama-3.2-90b",
1770      "name": "Llama 3.2 90B Vision Instruct",
1771      "cost_per_1m_in": 0.72,
1772      "cost_per_1m_out": 0.72,
1773      "cost_per_1m_in_cached": 0,
1774      "cost_per_1m_out_cached": 0,
1775      "context_window": 128000,
1776      "default_max_tokens": 8000,
1777      "can_reason": false,
1778      "supports_attachments": true
1779    },
1780    {
1781      "id": "meta/llama-3.3-70b",
1782      "name": "Llama 3.3 70B Instruct",
1783      "cost_per_1m_in": 0.72,
1784      "cost_per_1m_out": 0.72,
1785      "cost_per_1m_in_cached": 0,
1786      "cost_per_1m_out_cached": 0,
1787      "context_window": 128000,
1788      "default_max_tokens": 8000,
1789      "can_reason": false,
1790      "supports_attachments": false
1791    },
1792    {
1793      "id": "meta/llama-4-maverick",
1794      "name": "Llama 4 Maverick 17B Instruct",
1795      "cost_per_1m_in": 0.24,
1796      "cost_per_1m_out": 0.97,
1797      "cost_per_1m_in_cached": 0,
1798      "cost_per_1m_out_cached": 0,
1799      "context_window": 128000,
1800      "default_max_tokens": 8000,
1801      "can_reason": false,
1802      "supports_attachments": true
1803    },
1804    {
1805      "id": "meta/llama-4-scout",
1806      "name": "Llama 4 Scout 17B Instruct",
1807      "cost_per_1m_in": 0.17,
1808      "cost_per_1m_out": 0.66,
1809      "cost_per_1m_in_cached": 0,
1810      "cost_per_1m_out_cached": 0,
1811      "context_window": 128000,
1812      "default_max_tokens": 8000,
1813      "can_reason": false,
1814      "supports_attachments": true
1815    },
1816    {
1817      "id": "meituan/longcat-flash-chat",
1818      "name": "LongCat Flash Chat",
1819      "cost_per_1m_in": 0,
1820      "cost_per_1m_out": 0,
1821      "cost_per_1m_in_cached": 0,
1822      "cost_per_1m_out_cached": 0,
1823      "context_window": 128000,
1824      "default_max_tokens": 8000,
1825      "can_reason": false,
1826      "supports_attachments": false
1827    },
1828    {
1829      "id": "inception/mercury-2",
1830      "name": "Mercury 2",
1831      "cost_per_1m_in": 0.25,
1832      "cost_per_1m_out": 0.75,
1833      "cost_per_1m_in_cached": 0.025,
1834      "cost_per_1m_out_cached": 0,
1835      "context_window": 128000,
1836      "default_max_tokens": 8000,
1837      "can_reason": true,
1838      "reasoning_levels": [
1839        "low",
1840        "medium",
1841        "high"
1842      ],
1843      "default_reasoning_effort": "medium",
1844      "supports_attachments": false
1845    },
1846    {
1847      "id": "inception/mercury-coder-small",
1848      "name": "Mercury Coder Small Beta",
1849      "cost_per_1m_in": 0.25,
1850      "cost_per_1m_out": 1,
1851      "cost_per_1m_in_cached": 0,
1852      "cost_per_1m_out_cached": 0,
1853      "context_window": 32000,
1854      "default_max_tokens": 8000,
1855      "can_reason": false,
1856      "supports_attachments": false
1857    },
1858    {
1859      "id": "xiaomi/mimo-v2-flash",
1860      "name": "MiMo V2 Flash",
1861      "cost_per_1m_in": 0.1,
1862      "cost_per_1m_out": 0.3,
1863      "cost_per_1m_in_cached": 0.01,
1864      "cost_per_1m_out_cached": 0,
1865      "context_window": 262144,
1866      "default_max_tokens": 8000,
1867      "can_reason": true,
1868      "reasoning_levels": [
1869        "low",
1870        "medium",
1871        "high"
1872      ],
1873      "default_reasoning_effort": "medium",
1874      "supports_attachments": false
1875    },
1876    {
1877      "id": "xiaomi/mimo-v2-pro",
1878      "name": "MiMo V2 Pro",
1879      "cost_per_1m_in": 1,
1880      "cost_per_1m_out": 3,
1881      "cost_per_1m_in_cached": 0.2,
1882      "cost_per_1m_out_cached": 0,
1883      "context_window": 1000000,
1884      "default_max_tokens": 8000,
1885      "can_reason": true,
1886      "reasoning_levels": [
1887        "low",
1888        "medium",
1889        "high"
1890      ],
1891      "default_reasoning_effort": "medium",
1892      "supports_attachments": false
1893    },
1894    {
1895      "id": "minimax/minimax-m2",
1896      "name": "MiniMax M2",
1897      "cost_per_1m_in": 0.3,
1898      "cost_per_1m_out": 1.2,
1899      "cost_per_1m_in_cached": 0.03,
1900      "cost_per_1m_out_cached": 0.375,
1901      "context_window": 205000,
1902      "default_max_tokens": 8000,
1903      "can_reason": true,
1904      "reasoning_levels": [
1905        "low",
1906        "medium",
1907        "high"
1908      ],
1909      "default_reasoning_effort": "medium",
1910      "supports_attachments": false
1911    },
1912    {
1913      "id": "minimax/minimax-m2.1",
1914      "name": "MiniMax M2.1",
1915      "cost_per_1m_in": 0.3,
1916      "cost_per_1m_out": 1.2,
1917      "cost_per_1m_in_cached": 0.03,
1918      "cost_per_1m_out_cached": 0.375,
1919      "context_window": 204800,
1920      "default_max_tokens": 8000,
1921      "can_reason": true,
1922      "reasoning_levels": [
1923        "low",
1924        "medium",
1925        "high"
1926      ],
1927      "default_reasoning_effort": "medium",
1928      "supports_attachments": false
1929    },
1930    {
1931      "id": "minimax/minimax-m2.1-lightning",
1932      "name": "MiniMax M2.1 Lightning",
1933      "cost_per_1m_in": 0.3,
1934      "cost_per_1m_out": 2.4,
1935      "cost_per_1m_in_cached": 0.03,
1936      "cost_per_1m_out_cached": 0.375,
1937      "context_window": 204800,
1938      "default_max_tokens": 8000,
1939      "can_reason": true,
1940      "reasoning_levels": [
1941        "low",
1942        "medium",
1943        "high"
1944      ],
1945      "default_reasoning_effort": "medium",
1946      "supports_attachments": false
1947    },
1948    {
1949      "id": "minimax/minimax-m2.5",
1950      "name": "MiniMax M2.5",
1951      "cost_per_1m_in": 0.3,
1952      "cost_per_1m_out": 1.2,
1953      "cost_per_1m_in_cached": 0.03,
1954      "cost_per_1m_out_cached": 0.375,
1955      "context_window": 204800,
1956      "default_max_tokens": 8000,
1957      "can_reason": true,
1958      "reasoning_levels": [
1959        "low",
1960        "medium",
1961        "high"
1962      ],
1963      "default_reasoning_effort": "medium",
1964      "supports_attachments": false
1965    },
1966    {
1967      "id": "minimax/minimax-m2.5-highspeed",
1968      "name": "MiniMax M2.5 High Speed",
1969      "cost_per_1m_in": 0.6,
1970      "cost_per_1m_out": 2.4,
1971      "cost_per_1m_in_cached": 0.03,
1972      "cost_per_1m_out_cached": 0.375,
1973      "context_window": 204800,
1974      "default_max_tokens": 8000,
1975      "can_reason": true,
1976      "reasoning_levels": [
1977        "low",
1978        "medium",
1979        "high"
1980      ],
1981      "default_reasoning_effort": "medium",
1982      "supports_attachments": false
1983    },
1984    {
1985      "id": "minimax/minimax-m2.7-highspeed",
1986      "name": "MiniMax M2.7 High Speed",
1987      "cost_per_1m_in": 0.6,
1988      "cost_per_1m_out": 2.4,
1989      "cost_per_1m_in_cached": 0.06,
1990      "cost_per_1m_out_cached": 0.375,
1991      "context_window": 204800,
1992      "default_max_tokens": 8000,
1993      "can_reason": true,
1994      "reasoning_levels": [
1995        "low",
1996        "medium",
1997        "high"
1998      ],
1999      "default_reasoning_effort": "medium",
2000      "supports_attachments": true
2001    },
2002    {
2003      "id": "minimax/minimax-m2.7",
2004      "name": "Minimax M2.7",
2005      "cost_per_1m_in": 0.3,
2006      "cost_per_1m_out": 1.2,
2007      "cost_per_1m_in_cached": 0.06,
2008      "cost_per_1m_out_cached": 0.375,
2009      "context_window": 204800,
2010      "default_max_tokens": 8000,
2011      "can_reason": true,
2012      "reasoning_levels": [
2013        "low",
2014        "medium",
2015        "high"
2016      ],
2017      "default_reasoning_effort": "medium",
2018      "supports_attachments": true
2019    },
2020    {
2021      "id": "mistral/ministral-3b",
2022      "name": "Ministral 3B",
2023      "cost_per_1m_in": 0.1,
2024      "cost_per_1m_out": 0.1,
2025      "cost_per_1m_in_cached": 0,
2026      "cost_per_1m_out_cached": 0,
2027      "context_window": 128000,
2028      "default_max_tokens": 4000,
2029      "can_reason": false,
2030      "supports_attachments": false
2031    },
2032    {
2033      "id": "mistral/ministral-8b",
2034      "name": "Ministral 8B",
2035      "cost_per_1m_in": 0.15,
2036      "cost_per_1m_out": 0.15,
2037      "cost_per_1m_in_cached": 0,
2038      "cost_per_1m_out_cached": 0,
2039      "context_window": 128000,
2040      "default_max_tokens": 4000,
2041      "can_reason": false,
2042      "supports_attachments": false
2043    },
2044    {
2045      "id": "mistral/codestral",
2046      "name": "Mistral Codestral",
2047      "cost_per_1m_in": 0.3,
2048      "cost_per_1m_out": 0.9,
2049      "cost_per_1m_in_cached": 0,
2050      "cost_per_1m_out_cached": 0,
2051      "context_window": 128000,
2052      "default_max_tokens": 4000,
2053      "can_reason": false,
2054      "supports_attachments": false
2055    },
2056    {
2057      "id": "mistral/mistral-medium",
2058      "name": "Mistral Medium 3.1",
2059      "cost_per_1m_in": 0.4,
2060      "cost_per_1m_out": 2,
2061      "cost_per_1m_in_cached": 0,
2062      "cost_per_1m_out_cached": 0,
2063      "context_window": 128000,
2064      "default_max_tokens": 8000,
2065      "can_reason": false,
2066      "supports_attachments": true
2067    },
2068    {
2069      "id": "mistral/mistral-small",
2070      "name": "Mistral Small",
2071      "cost_per_1m_in": 0.1,
2072      "cost_per_1m_out": 0.3,
2073      "cost_per_1m_in_cached": 0,
2074      "cost_per_1m_out_cached": 0,
2075      "context_window": 32000,
2076      "default_max_tokens": 4000,
2077      "can_reason": false,
2078      "supports_attachments": true
2079    },
2080    {
2081      "id": "nvidia/nemotron-nano-12b-v2-vl",
2082      "name": "Nvidia Nemotron Nano 12B V2 VL",
2083      "cost_per_1m_in": 0.2,
2084      "cost_per_1m_out": 0.6,
2085      "cost_per_1m_in_cached": 0,
2086      "cost_per_1m_out_cached": 0,
2087      "context_window": 131072,
2088      "default_max_tokens": 8000,
2089      "can_reason": true,
2090      "reasoning_levels": [
2091        "low",
2092        "medium",
2093        "high"
2094      ],
2095      "default_reasoning_effort": "medium",
2096      "supports_attachments": true
2097    },
2098    {
2099      "id": "nvidia/nemotron-nano-9b-v2",
2100      "name": "Nvidia Nemotron Nano 9B V2",
2101      "cost_per_1m_in": 0.06,
2102      "cost_per_1m_out": 0.23,
2103      "cost_per_1m_in_cached": 0,
2104      "cost_per_1m_out_cached": 0,
2105      "context_window": 131072,
2106      "default_max_tokens": 8000,
2107      "can_reason": true,
2108      "reasoning_levels": [
2109        "low",
2110        "medium",
2111        "high"
2112      ],
2113      "default_reasoning_effort": "medium",
2114      "supports_attachments": false
2115    },
2116    {
2117      "id": "mistral/pixtral-12b",
2118      "name": "Pixtral 12B 2409",
2119      "cost_per_1m_in": 0.15,
2120      "cost_per_1m_out": 0.15,
2121      "cost_per_1m_in_cached": 0,
2122      "cost_per_1m_out_cached": 0,
2123      "context_window": 128000,
2124      "default_max_tokens": 4000,
2125      "can_reason": false,
2126      "supports_attachments": true
2127    },
2128    {
2129      "id": "mistral/pixtral-large",
2130      "name": "Pixtral Large",
2131      "cost_per_1m_in": 2,
2132      "cost_per_1m_out": 6,
2133      "cost_per_1m_in_cached": 0,
2134      "cost_per_1m_out_cached": 0,
2135      "context_window": 128000,
2136      "default_max_tokens": 4000,
2137      "can_reason": false,
2138      "supports_attachments": true
2139    },
2140    {
2141      "id": "alibaba/qwen-3-32b",
2142      "name": "Qwen 3 32B",
2143      "cost_per_1m_in": 0.16,
2144      "cost_per_1m_out": 0.64,
2145      "cost_per_1m_in_cached": 0,
2146      "cost_per_1m_out_cached": 0,
2147      "context_window": 128000,
2148      "default_max_tokens": 8000,
2149      "can_reason": true,
2150      "reasoning_levels": [
2151        "low",
2152        "medium",
2153        "high"
2154      ],
2155      "default_reasoning_effort": "medium",
2156      "supports_attachments": false
2157    },
2158    {
2159      "id": "alibaba/qwen3-coder-30b-a3b",
2160      "name": "Qwen 3 Coder 30B A3B Instruct",
2161      "cost_per_1m_in": 0.15,
2162      "cost_per_1m_out": 0.6,
2163      "cost_per_1m_in_cached": 0,
2164      "cost_per_1m_out_cached": 0,
2165      "context_window": 262144,
2166      "default_max_tokens": 8000,
2167      "can_reason": true,
2168      "reasoning_levels": [
2169        "low",
2170        "medium",
2171        "high"
2172      ],
2173      "default_reasoning_effort": "medium",
2174      "supports_attachments": false
2175    },
2176    {
2177      "id": "alibaba/qwen3-max-thinking",
2178      "name": "Qwen 3 Max Thinking",
2179      "cost_per_1m_in": 1.2,
2180      "cost_per_1m_out": 6,
2181      "cost_per_1m_in_cached": 0.24,
2182      "cost_per_1m_out_cached": 0,
2183      "context_window": 256000,
2184      "default_max_tokens": 8000,
2185      "can_reason": true,
2186      "reasoning_levels": [
2187        "low",
2188        "medium",
2189        "high"
2190      ],
2191      "default_reasoning_effort": "medium",
2192      "supports_attachments": false
2193    },
2194    {
2195      "id": "alibaba/qwen3.5-flash",
2196      "name": "Qwen 3.5 Flash",
2197      "cost_per_1m_in": 0.1,
2198      "cost_per_1m_out": 0.4,
2199      "cost_per_1m_in_cached": 0.001,
2200      "cost_per_1m_out_cached": 0.125,
2201      "context_window": 1000000,
2202      "default_max_tokens": 8000,
2203      "can_reason": true,
2204      "reasoning_levels": [
2205        "low",
2206        "medium",
2207        "high"
2208      ],
2209      "default_reasoning_effort": "medium",
2210      "supports_attachments": true
2211    },
2212    {
2213      "id": "alibaba/qwen3.5-plus",
2214      "name": "Qwen 3.5 Plus",
2215      "cost_per_1m_in": 0.4,
2216      "cost_per_1m_out": 2.4,
2217      "cost_per_1m_in_cached": 0.04,
2218      "cost_per_1m_out_cached": 0.5,
2219      "context_window": 1000000,
2220      "default_max_tokens": 8000,
2221      "can_reason": true,
2222      "reasoning_levels": [
2223        "low",
2224        "medium",
2225        "high"
2226      ],
2227      "default_reasoning_effort": "medium",
2228      "supports_attachments": true
2229    },
2230    {
2231      "id": "alibaba/qwen-3.6-max-preview",
2232      "name": "Qwen 3.6 Max Preview",
2233      "cost_per_1m_in": 1.3,
2234      "cost_per_1m_out": 7.8,
2235      "cost_per_1m_in_cached": 0.26,
2236      "cost_per_1m_out_cached": 1.625,
2237      "context_window": 240000,
2238      "default_max_tokens": 8000,
2239      "can_reason": true,
2240      "reasoning_levels": [
2241        "low",
2242        "medium",
2243        "high"
2244      ],
2245      "default_reasoning_effort": "medium",
2246      "supports_attachments": true
2247    },
2248    {
2249      "id": "alibaba/qwen3.6-plus",
2250      "name": "Qwen 3.6 Plus",
2251      "cost_per_1m_in": 0.5,
2252      "cost_per_1m_out": 3,
2253      "cost_per_1m_in_cached": 0.1,
2254      "cost_per_1m_out_cached": 0.625,
2255      "context_window": 1000000,
2256      "default_max_tokens": 8000,
2257      "can_reason": true,
2258      "reasoning_levels": [
2259        "low",
2260        "medium",
2261        "high"
2262      ],
2263      "default_reasoning_effort": "medium",
2264      "supports_attachments": true
2265    },
2266    {
2267      "id": "alibaba/qwen3-235b-a22b-thinking",
2268      "name": "Qwen3 235B A22B Thinking 2507",
2269      "cost_per_1m_in": 0.23,
2270      "cost_per_1m_out": 2.3,
2271      "cost_per_1m_in_cached": 0.2,
2272      "cost_per_1m_out_cached": 0,
2273      "context_window": 262114,
2274      "default_max_tokens": 8000,
2275      "can_reason": true,
2276      "reasoning_levels": [
2277        "low",
2278        "medium",
2279        "high"
2280      ],
2281      "default_reasoning_effort": "medium",
2282      "supports_attachments": true
2283    },
2284    {
2285      "id": "alibaba/qwen-3-235b",
2286      "name": "Qwen3 235B A22b Instruct 2507",
2287      "cost_per_1m_in": 0.6,
2288      "cost_per_1m_out": 1.2,
2289      "cost_per_1m_in_cached": 0.6,
2290      "cost_per_1m_out_cached": 0,
2291      "context_window": 131000,
2292      "default_max_tokens": 8000,
2293      "can_reason": false,
2294      "supports_attachments": false
2295    },
2296    {
2297      "id": "alibaba/qwen3-coder",
2298      "name": "Qwen3 Coder 480B A35B Instruct",
2299      "cost_per_1m_in": 1.5,
2300      "cost_per_1m_out": 7.5,
2301      "cost_per_1m_in_cached": 0.3,
2302      "cost_per_1m_out_cached": 0,
2303      "context_window": 262144,
2304      "default_max_tokens": 8000,
2305      "can_reason": false,
2306      "supports_attachments": false
2307    },
2308    {
2309      "id": "alibaba/qwen3-coder-next",
2310      "name": "Qwen3 Coder Next",
2311      "cost_per_1m_in": 0.5,
2312      "cost_per_1m_out": 1.2,
2313      "cost_per_1m_in_cached": 0,
2314      "cost_per_1m_out_cached": 0,
2315      "context_window": 256000,
2316      "default_max_tokens": 8000,
2317      "can_reason": false,
2318      "supports_attachments": false
2319    },
2320    {
2321      "id": "alibaba/qwen3-coder-plus",
2322      "name": "Qwen3 Coder Plus",
2323      "cost_per_1m_in": 1,
2324      "cost_per_1m_out": 5,
2325      "cost_per_1m_in_cached": 0.2,
2326      "cost_per_1m_out_cached": 0,
2327      "context_window": 1000000,
2328      "default_max_tokens": 8000,
2329      "can_reason": false,
2330      "supports_attachments": false
2331    },
2332    {
2333      "id": "alibaba/qwen3-max",
2334      "name": "Qwen3 Max",
2335      "cost_per_1m_in": 1.2,
2336      "cost_per_1m_out": 6,
2337      "cost_per_1m_in_cached": 0.24,
2338      "cost_per_1m_out_cached": 0,
2339      "context_window": 262144,
2340      "default_max_tokens": 8000,
2341      "can_reason": false,
2342      "supports_attachments": false
2343    },
2344    {
2345      "id": "alibaba/qwen3-max-preview",
2346      "name": "Qwen3 Max Preview",
2347      "cost_per_1m_in": 1.2,
2348      "cost_per_1m_out": 6,
2349      "cost_per_1m_in_cached": 0.24,
2350      "cost_per_1m_out_cached": 0,
2351      "context_window": 262144,
2352      "default_max_tokens": 8000,
2353      "can_reason": false,
2354      "supports_attachments": false
2355    },
2356    {
2357      "id": "alibaba/qwen3-vl-thinking",
2358      "name": "Qwen3 VL 235B A22B Thinking",
2359      "cost_per_1m_in": 0.4,
2360      "cost_per_1m_out": 4,
2361      "cost_per_1m_in_cached": 0,
2362      "cost_per_1m_out_cached": 0,
2363      "context_window": 131072,
2364      "default_max_tokens": 8000,
2365      "can_reason": true,
2366      "reasoning_levels": [
2367        "low",
2368        "medium",
2369        "high"
2370      ],
2371      "default_reasoning_effort": "medium",
2372      "supports_attachments": true
2373    },
2374    {
2375      "id": "alibaba/qwen-3-14b",
2376      "name": "Qwen3-14B",
2377      "cost_per_1m_in": 0.12,
2378      "cost_per_1m_out": 0.24,
2379      "cost_per_1m_in_cached": 0,
2380      "cost_per_1m_out_cached": 0,
2381      "context_window": 40960,
2382      "default_max_tokens": 8000,
2383      "can_reason": true,
2384      "reasoning_levels": [
2385        "low",
2386        "medium",
2387        "high"
2388      ],
2389      "default_reasoning_effort": "medium",
2390      "supports_attachments": false
2391    },
2392    {
2393      "id": "alibaba/qwen-3-30b",
2394      "name": "Qwen3-30B-A3B",
2395      "cost_per_1m_in": 0.08,
2396      "cost_per_1m_out": 0.29,
2397      "cost_per_1m_in_cached": 0,
2398      "cost_per_1m_out_cached": 0,
2399      "context_window": 40960,
2400      "default_max_tokens": 8000,
2401      "can_reason": true,
2402      "reasoning_levels": [
2403        "low",
2404        "medium",
2405        "high"
2406      ],
2407      "default_reasoning_effort": "medium",
2408      "supports_attachments": false
2409    },
2410    {
2411      "id": "bytedance/seed-1.6",
2412      "name": "Seed 1.6",
2413      "cost_per_1m_in": 0.25,
2414      "cost_per_1m_out": 2,
2415      "cost_per_1m_in_cached": 0.05,
2416      "cost_per_1m_out_cached": 0,
2417      "context_window": 256000,
2418      "default_max_tokens": 8000,
2419      "can_reason": true,
2420      "reasoning_levels": [
2421        "low",
2422        "medium",
2423        "high"
2424      ],
2425      "default_reasoning_effort": "medium",
2426      "supports_attachments": false
2427    },
2428    {
2429      "id": "perplexity/sonar",
2430      "name": "Sonar",
2431      "cost_per_1m_in": 0,
2432      "cost_per_1m_out": 0,
2433      "cost_per_1m_in_cached": 0,
2434      "cost_per_1m_out_cached": 0,
2435      "context_window": 127000,
2436      "default_max_tokens": 8000,
2437      "can_reason": false,
2438      "supports_attachments": true
2439    },
2440    {
2441      "id": "perplexity/sonar-pro",
2442      "name": "Sonar Pro",
2443      "cost_per_1m_in": 0,
2444      "cost_per_1m_out": 0,
2445      "cost_per_1m_in_cached": 0,
2446      "cost_per_1m_out_cached": 0,
2447      "context_window": 200000,
2448      "default_max_tokens": 8000,
2449      "can_reason": false,
2450      "supports_attachments": true
2451    },
2452    {
2453      "id": "arcee-ai/trinity-large-preview",
2454      "name": "Trinity Large Preview",
2455      "cost_per_1m_in": 0.25,
2456      "cost_per_1m_out": 1,
2457      "cost_per_1m_in_cached": 0,
2458      "cost_per_1m_out_cached": 0,
2459      "context_window": 131000,
2460      "default_max_tokens": 8000,
2461      "can_reason": false,
2462      "supports_attachments": false
2463    },
2464    {
2465      "id": "arcee-ai/trinity-large-thinking",
2466      "name": "Trinity Large Thinking",
2467      "cost_per_1m_in": 0.25,
2468      "cost_per_1m_out": 0.9,
2469      "cost_per_1m_in_cached": 0,
2470      "cost_per_1m_out_cached": 0,
2471      "context_window": 262100,
2472      "default_max_tokens": 8000,
2473      "can_reason": true,
2474      "reasoning_levels": [
2475        "low",
2476        "medium",
2477        "high"
2478      ],
2479      "default_reasoning_effort": "medium",
2480      "supports_attachments": false
2481    },
2482    {
2483      "id": "openai/o1",
2484      "name": "o1",
2485      "cost_per_1m_in": 15,
2486      "cost_per_1m_out": 60,
2487      "cost_per_1m_in_cached": 7.5,
2488      "cost_per_1m_out_cached": 0,
2489      "context_window": 200000,
2490      "default_max_tokens": 8000,
2491      "can_reason": true,
2492      "reasoning_levels": [
2493        "low",
2494        "medium",
2495        "high"
2496      ],
2497      "default_reasoning_effort": "medium",
2498      "supports_attachments": true
2499    },
2500    {
2501      "id": "openai/o3",
2502      "name": "o3",
2503      "cost_per_1m_in": 2,
2504      "cost_per_1m_out": 8,
2505      "cost_per_1m_in_cached": 0.5,
2506      "cost_per_1m_out_cached": 0,
2507      "context_window": 200000,
2508      "default_max_tokens": 8000,
2509      "can_reason": true,
2510      "reasoning_levels": [
2511        "low",
2512        "medium",
2513        "high"
2514      ],
2515      "default_reasoning_effort": "medium",
2516      "supports_attachments": true
2517    },
2518    {
2519      "id": "openai/o3-pro",
2520      "name": "o3 Pro",
2521      "cost_per_1m_in": 20,
2522      "cost_per_1m_out": 80,
2523      "cost_per_1m_in_cached": 0,
2524      "cost_per_1m_out_cached": 0,
2525      "context_window": 200000,
2526      "default_max_tokens": 8000,
2527      "can_reason": true,
2528      "reasoning_levels": [
2529        "low",
2530        "medium",
2531        "high"
2532      ],
2533      "default_reasoning_effort": "medium",
2534      "supports_attachments": true
2535    },
2536    {
2537      "id": "openai/o3-deep-research",
2538      "name": "o3-deep-research",
2539      "cost_per_1m_in": 10,
2540      "cost_per_1m_out": 40,
2541      "cost_per_1m_in_cached": 2.5,
2542      "cost_per_1m_out_cached": 0,
2543      "context_window": 200000,
2544      "default_max_tokens": 8000,
2545      "can_reason": true,
2546      "reasoning_levels": [
2547        "low",
2548        "medium",
2549        "high"
2550      ],
2551      "default_reasoning_effort": "medium",
2552      "supports_attachments": true
2553    },
2554    {
2555      "id": "openai/o3-mini",
2556      "name": "o3-mini",
2557      "cost_per_1m_in": 1.1,
2558      "cost_per_1m_out": 4.4,
2559      "cost_per_1m_in_cached": 0.55,
2560      "cost_per_1m_out_cached": 0,
2561      "context_window": 200000,
2562      "default_max_tokens": 8000,
2563      "can_reason": true,
2564      "reasoning_levels": [
2565        "low",
2566        "medium",
2567        "high"
2568      ],
2569      "default_reasoning_effort": "medium",
2570      "supports_attachments": false
2571    },
2572    {
2573      "id": "openai/o4-mini",
2574      "name": "o4-mini",
2575      "cost_per_1m_in": 1.1,
2576      "cost_per_1m_out": 4.4,
2577      "cost_per_1m_in_cached": 0.275,
2578      "cost_per_1m_out_cached": 0,
2579      "context_window": 200000,
2580      "default_max_tokens": 8000,
2581      "can_reason": true,
2582      "reasoning_levels": [
2583        "low",
2584        "medium",
2585        "high"
2586      ],
2587      "default_reasoning_effort": "medium",
2588      "supports_attachments": true
2589    }
2590  ],
2591  "default_headers": {
2592    "HTTP-Referer": "https://charm.land",
2593    "X-Title": "Crush"
2594  }
2595}