vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "vercel",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true
  21    },
  22    {
  23      "id": "anthropic/claude-3.5-haiku",
  24      "name": "Claude 3.5 Haiku",
  25      "cost_per_1m_in": 0.8,
  26      "cost_per_1m_out": 4,
  27      "cost_per_1m_in_cached": 0.08,
  28      "cost_per_1m_out_cached": 1,
  29      "context_window": 200000,
  30      "default_max_tokens": 8000,
  31      "can_reason": false,
  32      "supports_attachments": true
  33    },
  34    {
  35      "id": "anthropic/claude-3.7-sonnet",
  36      "name": "Claude 3.7 Sonnet",
  37      "cost_per_1m_in": 3,
  38      "cost_per_1m_out": 15,
  39      "cost_per_1m_in_cached": 0.3,
  40      "cost_per_1m_out_cached": 3.75,
  41      "context_window": 200000,
  42      "default_max_tokens": 8000,
  43      "can_reason": true,
  44      "reasoning_levels": [
  45        "none",
  46        "minimal",
  47        "low",
  48        "medium",
  49        "high",
  50        "xhigh"
  51      ],
  52      "default_reasoning_effort": "medium",
  53      "supports_attachments": true
  54    },
  55    {
  56      "id": "anthropic/claude-haiku-4.5",
  57      "name": "Claude Haiku 4.5",
  58      "cost_per_1m_in": 1,
  59      "cost_per_1m_out": 5,
  60      "cost_per_1m_in_cached": 0.1,
  61      "cost_per_1m_out_cached": 1.25,
  62      "context_window": 200000,
  63      "default_max_tokens": 8000,
  64      "can_reason": true,
  65      "reasoning_levels": [
  66        "none",
  67        "minimal",
  68        "low",
  69        "medium",
  70        "high",
  71        "xhigh"
  72      ],
  73      "default_reasoning_effort": "medium",
  74      "supports_attachments": true
  75    },
  76    {
  77      "id": "anthropic/claude-opus-4",
  78      "name": "Claude Opus 4",
  79      "cost_per_1m_in": 15,
  80      "cost_per_1m_out": 75,
  81      "cost_per_1m_in_cached": 1.5,
  82      "cost_per_1m_out_cached": 18.75,
  83      "context_window": 200000,
  84      "default_max_tokens": 8000,
  85      "can_reason": true,
  86      "reasoning_levels": [
  87        "none",
  88        "minimal",
  89        "low",
  90        "medium",
  91        "high",
  92        "xhigh"
  93      ],
  94      "default_reasoning_effort": "medium",
  95      "supports_attachments": true
  96    },
  97    {
  98      "id": "anthropic/claude-opus-4.1",
  99      "name": "Claude Opus 4.1",
 100      "cost_per_1m_in": 15,
 101      "cost_per_1m_out": 75,
 102      "cost_per_1m_in_cached": 1.5,
 103      "cost_per_1m_out_cached": 18.75,
 104      "context_window": 200000,
 105      "default_max_tokens": 8000,
 106      "can_reason": true,
 107      "reasoning_levels": [
 108        "none",
 109        "minimal",
 110        "low",
 111        "medium",
 112        "high",
 113        "xhigh"
 114      ],
 115      "default_reasoning_effort": "medium",
 116      "supports_attachments": true
 117    },
 118    {
 119      "id": "anthropic/claude-opus-4.5",
 120      "name": "Claude Opus 4.5",
 121      "cost_per_1m_in": 5,
 122      "cost_per_1m_out": 25,
 123      "cost_per_1m_in_cached": 0.5,
 124      "cost_per_1m_out_cached": 6.25,
 125      "context_window": 200000,
 126      "default_max_tokens": 8000,
 127      "can_reason": true,
 128      "reasoning_levels": [
 129        "none",
 130        "minimal",
 131        "low",
 132        "medium",
 133        "high",
 134        "xhigh"
 135      ],
 136      "default_reasoning_effort": "medium",
 137      "supports_attachments": true
 138    },
 139    {
 140      "id": "anthropic/claude-opus-4.6",
 141      "name": "Claude Opus 4.6",
 142      "cost_per_1m_in": 5,
 143      "cost_per_1m_out": 25,
 144      "cost_per_1m_in_cached": 0.5,
 145      "cost_per_1m_out_cached": 6.25,
 146      "context_window": 1000000,
 147      "default_max_tokens": 8000,
 148      "can_reason": true,
 149      "reasoning_levels": [
 150        "none",
 151        "minimal",
 152        "low",
 153        "medium",
 154        "high",
 155        "xhigh"
 156      ],
 157      "default_reasoning_effort": "medium",
 158      "supports_attachments": true
 159    },
 160    {
 161      "id": "anthropic/claude-opus-4.7",
 162      "name": "Claude Opus 4.7",
 163      "cost_per_1m_in": 5,
 164      "cost_per_1m_out": 25,
 165      "cost_per_1m_in_cached": 0.5,
 166      "cost_per_1m_out_cached": 6.25,
 167      "context_window": 1000000,
 168      "default_max_tokens": 8000,
 169      "can_reason": true,
 170      "reasoning_levels": [
 171        "none",
 172        "minimal",
 173        "low",
 174        "medium",
 175        "high",
 176        "xhigh"
 177      ],
 178      "default_reasoning_effort": "medium",
 179      "supports_attachments": true
 180    },
 181    {
 182      "id": "anthropic/claude-sonnet-4",
 183      "name": "Claude Sonnet 4",
 184      "cost_per_1m_in": 3,
 185      "cost_per_1m_out": 15,
 186      "cost_per_1m_in_cached": 0.3,
 187      "cost_per_1m_out_cached": 3.75,
 188      "context_window": 1000000,
 189      "default_max_tokens": 8000,
 190      "can_reason": true,
 191      "reasoning_levels": [
 192        "none",
 193        "minimal",
 194        "low",
 195        "medium",
 196        "high",
 197        "xhigh"
 198      ],
 199      "default_reasoning_effort": "medium",
 200      "supports_attachments": true
 201    },
 202    {
 203      "id": "anthropic/claude-sonnet-4.5",
 204      "name": "Claude Sonnet 4.5",
 205      "cost_per_1m_in": 3,
 206      "cost_per_1m_out": 15,
 207      "cost_per_1m_in_cached": 0.3,
 208      "cost_per_1m_out_cached": 3.75,
 209      "context_window": 1000000,
 210      "default_max_tokens": 8000,
 211      "can_reason": true,
 212      "reasoning_levels": [
 213        "none",
 214        "minimal",
 215        "low",
 216        "medium",
 217        "high",
 218        "xhigh"
 219      ],
 220      "default_reasoning_effort": "medium",
 221      "supports_attachments": true
 222    },
 223    {
 224      "id": "anthropic/claude-sonnet-4.6",
 225      "name": "Claude Sonnet 4.6",
 226      "cost_per_1m_in": 3,
 227      "cost_per_1m_out": 15,
 228      "cost_per_1m_in_cached": 0.3,
 229      "cost_per_1m_out_cached": 3.75,
 230      "context_window": 1000000,
 231      "default_max_tokens": 8000,
 232      "can_reason": true,
 233      "reasoning_levels": [
 234        "none",
 235        "minimal",
 236        "low",
 237        "medium",
 238        "high",
 239        "xhigh"
 240      ],
 241      "default_reasoning_effort": "medium",
 242      "supports_attachments": true
 243    },
 244    {
 245      "id": "cohere/command-a",
 246      "name": "Command A",
 247      "cost_per_1m_in": 2.5,
 248      "cost_per_1m_out": 10,
 249      "cost_per_1m_in_cached": 0,
 250      "cost_per_1m_out_cached": 0,
 251      "context_window": 256000,
 252      "default_max_tokens": 8000,
 253      "can_reason": false,
 254      "supports_attachments": false
 255    },
 256    {
 257      "id": "deepseek/deepseek-v3",
 258      "name": "DeepSeek V3 0324",
 259      "cost_per_1m_in": 0.77,
 260      "cost_per_1m_out": 0.77,
 261      "cost_per_1m_in_cached": 0,
 262      "cost_per_1m_out_cached": 0,
 263      "context_window": 163840,
 264      "default_max_tokens": 8000,
 265      "can_reason": false,
 266      "supports_attachments": false
 267    },
 268    {
 269      "id": "deepseek/deepseek-v3.1-terminus",
 270      "name": "DeepSeek V3.1 Terminus",
 271      "cost_per_1m_in": 0.27,
 272      "cost_per_1m_out": 1,
 273      "cost_per_1m_in_cached": 0.135,
 274      "cost_per_1m_out_cached": 0,
 275      "context_window": 131072,
 276      "default_max_tokens": 8000,
 277      "can_reason": true,
 278      "reasoning_levels": [
 279        "low",
 280        "medium",
 281        "high"
 282      ],
 283      "default_reasoning_effort": "medium",
 284      "supports_attachments": false
 285    },
 286    {
 287      "id": "deepseek/deepseek-v3.2",
 288      "name": "DeepSeek V3.2",
 289      "cost_per_1m_in": 0.28,
 290      "cost_per_1m_out": 0.42,
 291      "cost_per_1m_in_cached": 0.028,
 292      "cost_per_1m_out_cached": 0,
 293      "context_window": 128000,
 294      "default_max_tokens": 8000,
 295      "can_reason": false,
 296      "supports_attachments": false
 297    },
 298    {
 299      "id": "deepseek/deepseek-v3.2-thinking",
 300      "name": "DeepSeek V3.2 Thinking",
 301      "cost_per_1m_in": 0.62,
 302      "cost_per_1m_out": 1.85,
 303      "cost_per_1m_in_cached": 0,
 304      "cost_per_1m_out_cached": 0,
 305      "context_window": 128000,
 306      "default_max_tokens": 8000,
  307      "can_reason": true,
 308      "supports_attachments": false
 309    },
 310    {
 311      "id": "deepseek/deepseek-v4-flash",
 312      "name": "DeepSeek V4 Flash",
 313      "cost_per_1m_in": 0.14,
 314      "cost_per_1m_out": 0.28,
 315      "cost_per_1m_in_cached": 0.0028,
 316      "cost_per_1m_out_cached": 0,
 317      "context_window": 1000000,
 318      "default_max_tokens": 8000,
 319      "can_reason": true,
 320      "reasoning_levels": [
 321        "low",
 322        "medium",
 323        "high"
 324      ],
 325      "default_reasoning_effort": "medium",
 326      "supports_attachments": false
 327    },
 328    {
 329      "id": "deepseek/deepseek-v4-pro",
 330      "name": "DeepSeek V4 Pro",
 331      "cost_per_1m_in": 0.435,
 332      "cost_per_1m_out": 0.87,
 333      "cost_per_1m_in_cached": 0.0036,
 334      "cost_per_1m_out_cached": 0,
 335      "context_window": 1000000,
 336      "default_max_tokens": 8000,
 337      "can_reason": true,
 338      "reasoning_levels": [
 339        "low",
 340        "medium",
 341        "high"
 342      ],
 343      "default_reasoning_effort": "medium",
 344      "supports_attachments": false
 345    },
 346    {
 347      "id": "deepseek/deepseek-r1",
 348      "name": "DeepSeek-R1",
 349      "cost_per_1m_in": 1.35,
 350      "cost_per_1m_out": 5.4,
 351      "cost_per_1m_in_cached": 0,
 352      "cost_per_1m_out_cached": 0,
 353      "context_window": 128000,
 354      "default_max_tokens": 8000,
 355      "can_reason": true,
 356      "reasoning_levels": [
 357        "low",
 358        "medium",
 359        "high"
 360      ],
 361      "default_reasoning_effort": "medium",
 362      "supports_attachments": false
 363    },
 364    {
 365      "id": "deepseek/deepseek-v3.1",
 366      "name": "DeepSeek-V3.1",
 367      "cost_per_1m_in": 0.56,
 368      "cost_per_1m_out": 1.68,
 369      "cost_per_1m_in_cached": 0.28,
 370      "cost_per_1m_out_cached": 0,
 371      "context_window": 163840,
 372      "default_max_tokens": 8000,
 373      "can_reason": true,
 374      "reasoning_levels": [
 375        "low",
 376        "medium",
 377        "high"
 378      ],
 379      "default_reasoning_effort": "medium",
 380      "supports_attachments": false
 381    },
 382    {
 383      "id": "mistral/devstral-2",
 384      "name": "Devstral 2",
 385      "cost_per_1m_in": 0.4,
 386      "cost_per_1m_out": 2,
 387      "cost_per_1m_in_cached": 0,
 388      "cost_per_1m_out_cached": 0,
 389      "context_window": 256000,
 390      "default_max_tokens": 8000,
 391      "can_reason": false,
 392      "supports_attachments": false
 393    },
 394    {
 395      "id": "mistral/devstral-small",
 396      "name": "Devstral Small 1.1",
 397      "cost_per_1m_in": 0.1,
 398      "cost_per_1m_out": 0.3,
 399      "cost_per_1m_in_cached": 0,
 400      "cost_per_1m_out_cached": 0,
 401      "context_window": 128000,
 402      "default_max_tokens": 8000,
 403      "can_reason": false,
 404      "supports_attachments": false
 405    },
 406    {
 407      "id": "mistral/devstral-small-2",
 408      "name": "Devstral Small 2",
 409      "cost_per_1m_in": 0.1,
 410      "cost_per_1m_out": 0.3,
 411      "cost_per_1m_in_cached": 0,
 412      "cost_per_1m_out_cached": 0,
 413      "context_window": 256000,
 414      "default_max_tokens": 8000,
 415      "can_reason": false,
 416      "supports_attachments": false
 417    },
 418    {
 419      "id": "zai/glm-4.5-air",
 420      "name": "GLM 4.5 Air",
 421      "cost_per_1m_in": 0.2,
 422      "cost_per_1m_out": 1.1,
 423      "cost_per_1m_in_cached": 0.03,
 424      "cost_per_1m_out_cached": 0,
 425      "context_window": 128000,
 426      "default_max_tokens": 8000,
 427      "can_reason": true,
 428      "reasoning_levels": [
 429        "low",
 430        "medium",
 431        "high"
 432      ],
 433      "default_reasoning_effort": "medium",
 434      "supports_attachments": false
 435    },
 436    {
 437      "id": "zai/glm-4.5v",
 438      "name": "GLM 4.5V",
 439      "cost_per_1m_in": 0.6,
 440      "cost_per_1m_out": 1.8,
 441      "cost_per_1m_in_cached": 0.11,
 442      "cost_per_1m_out_cached": 0,
 443      "context_window": 66000,
 444      "default_max_tokens": 8000,
 445      "can_reason": false,
 446      "supports_attachments": true
 447    },
 448    {
 449      "id": "zai/glm-4.6",
 450      "name": "GLM 4.6",
 451      "cost_per_1m_in": 0.6,
 452      "cost_per_1m_out": 2.2,
 453      "cost_per_1m_in_cached": 0.11,
 454      "cost_per_1m_out_cached": 0,
 455      "context_window": 200000,
 456      "default_max_tokens": 8000,
 457      "can_reason": true,
 458      "reasoning_levels": [
 459        "low",
 460        "medium",
 461        "high"
 462      ],
 463      "default_reasoning_effort": "medium",
 464      "supports_attachments": false
 465    },
 466    {
 467      "id": "zai/glm-4.7",
 468      "name": "GLM 4.7",
 469      "cost_per_1m_in": 2.25,
 470      "cost_per_1m_out": 2.75,
 471      "cost_per_1m_in_cached": 2.25,
 472      "cost_per_1m_out_cached": 0,
 473      "context_window": 131000,
 474      "default_max_tokens": 8000,
 475      "can_reason": true,
 476      "reasoning_levels": [
 477        "low",
 478        "medium",
 479        "high"
 480      ],
 481      "default_reasoning_effort": "medium",
 482      "supports_attachments": false
 483    },
 484    {
 485      "id": "zai/glm-4.7-flash",
 486      "name": "GLM 4.7 Flash",
 487      "cost_per_1m_in": 0.07,
 488      "cost_per_1m_out": 0.4,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 200000,
 492      "default_max_tokens": 8000,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": false
 501    },
 502    {
 503      "id": "zai/glm-4.7-flashx",
 504      "name": "GLM 4.7 FlashX",
 505      "cost_per_1m_in": 0.06,
 506      "cost_per_1m_out": 0.4,
 507      "cost_per_1m_in_cached": 0.01,
 508      "cost_per_1m_out_cached": 0,
 509      "context_window": 200000,
 510      "default_max_tokens": 8000,
 511      "can_reason": true,
 512      "reasoning_levels": [
 513        "low",
 514        "medium",
 515        "high"
 516      ],
 517      "default_reasoning_effort": "medium",
 518      "supports_attachments": false
 519    },
 520    {
 521      "id": "zai/glm-5",
 522      "name": "GLM 5",
 523      "cost_per_1m_in": 1,
 524      "cost_per_1m_out": 3.2,
 525      "cost_per_1m_in_cached": 0.2,
 526      "cost_per_1m_out_cached": 0,
 527      "context_window": 202800,
 528      "default_max_tokens": 8000,
 529      "can_reason": true,
 530      "reasoning_levels": [
 531        "low",
 532        "medium",
 533        "high"
 534      ],
 535      "default_reasoning_effort": "medium",
 536      "supports_attachments": false
 537    },
 538    {
 539      "id": "zai/glm-5-turbo",
 540      "name": "GLM 5 Turbo",
 541      "cost_per_1m_in": 1.2,
 542      "cost_per_1m_out": 4,
 543      "cost_per_1m_in_cached": 0.24,
 544      "cost_per_1m_out_cached": 0,
 545      "context_window": 202800,
 546      "default_max_tokens": 8000,
 547      "can_reason": true,
 548      "reasoning_levels": [
 549        "low",
 550        "medium",
 551        "high"
 552      ],
 553      "default_reasoning_effort": "medium",
 554      "supports_attachments": false
 555    },
 556    {
 557      "id": "zai/glm-5.1",
 558      "name": "GLM 5.1",
 559      "cost_per_1m_in": 1.4,
 560      "cost_per_1m_out": 4.4,
 561      "cost_per_1m_in_cached": 0.26,
 562      "cost_per_1m_out_cached": 0,
 563      "context_window": 202800,
 564      "default_max_tokens": 8000,
 565      "can_reason": true,
 566      "reasoning_levels": [
 567        "low",
 568        "medium",
 569        "high"
 570      ],
 571      "default_reasoning_effort": "medium",
 572      "supports_attachments": false
 573    },
 574    {
 575      "id": "zai/glm-5v-turbo",
 576      "name": "GLM 5V Turbo",
 577      "cost_per_1m_in": 1.2,
 578      "cost_per_1m_out": 4,
 579      "cost_per_1m_in_cached": 0.24,
 580      "cost_per_1m_out_cached": 0,
 581      "context_window": 200000,
 582      "default_max_tokens": 8000,
 583      "can_reason": true,
 584      "reasoning_levels": [
 585        "low",
 586        "medium",
 587        "high"
 588      ],
 589      "default_reasoning_effort": "medium",
 590      "supports_attachments": true
 591    },
 592    {
 593      "id": "zai/glm-4.5",
 594      "name": "GLM-4.5",
 595      "cost_per_1m_in": 0.6,
 596      "cost_per_1m_out": 2.2,
 597      "cost_per_1m_in_cached": 0.11,
 598      "cost_per_1m_out_cached": 0,
 599      "context_window": 128000,
 600      "default_max_tokens": 8000,
 601      "can_reason": true,
 602      "reasoning_levels": [
 603        "low",
 604        "medium",
 605        "high"
 606      ],
 607      "default_reasoning_effort": "medium",
 608      "supports_attachments": false
 609    },
 610    {
 611      "id": "zai/glm-4.6v",
 612      "name": "GLM-4.6V",
 613      "cost_per_1m_in": 0.3,
 614      "cost_per_1m_out": 0.9,
 615      "cost_per_1m_in_cached": 0.05,
 616      "cost_per_1m_out_cached": 0,
 617      "context_window": 128000,
 618      "default_max_tokens": 8000,
 619      "can_reason": true,
 620      "reasoning_levels": [
 621        "low",
 622        "medium",
 623        "high"
 624      ],
 625      "default_reasoning_effort": "medium",
 626      "supports_attachments": true
 627    },
 628    {
 629      "id": "zai/glm-4.6v-flash",
 630      "name": "GLM-4.6V-Flash",
 631      "cost_per_1m_in": 0,
 632      "cost_per_1m_out": 0,
 633      "cost_per_1m_in_cached": 0,
 634      "cost_per_1m_out_cached": 0,
 635      "context_window": 128000,
 636      "default_max_tokens": 8000,
 637      "can_reason": true,
 638      "reasoning_levels": [
 639        "low",
 640        "medium",
 641        "high"
 642      ],
 643      "default_reasoning_effort": "medium",
 644      "supports_attachments": true
 645    },
 646    {
 647      "id": "openai/gpt-5-chat",
 648      "name": "GPT 5 Chat",
 649      "cost_per_1m_in": 1.25,
 650      "cost_per_1m_out": 10,
 651      "cost_per_1m_in_cached": 0.125,
 652      "cost_per_1m_out_cached": 0,
 653      "context_window": 128000,
 654      "default_max_tokens": 8000,
 655      "can_reason": true,
 656      "reasoning_levels": [
 657        "low",
 658        "medium",
 659        "high"
 660      ],
 661      "default_reasoning_effort": "medium",
 662      "supports_attachments": true
 663    },
 664    {
 665      "id": "openai/gpt-5.1-codex-max",
 666      "name": "GPT 5.1 Codex Max",
 667      "cost_per_1m_in": 1.25,
 668      "cost_per_1m_out": 10,
 669      "cost_per_1m_in_cached": 0.125,
 670      "cost_per_1m_out_cached": 0,
 671      "context_window": 400000,
 672      "default_max_tokens": 8000,
 673      "can_reason": true,
 674      "reasoning_levels": [
 675        "low",
 676        "medium",
 677        "high"
 678      ],
 679      "default_reasoning_effort": "medium",
 680      "supports_attachments": true
 681    },
 682    {
 683      "id": "openai/gpt-5.1-codex-mini",
 684      "name": "GPT 5.1 Codex Mini",
 685      "cost_per_1m_in": 0.25,
 686      "cost_per_1m_out": 2,
 687      "cost_per_1m_in_cached": 0.025,
 688      "cost_per_1m_out_cached": 0,
 689      "context_window": 400000,
 690      "default_max_tokens": 8000,
 691      "can_reason": true,
 692      "reasoning_levels": [
 693        "low",
 694        "medium",
 695        "high"
 696      ],
 697      "default_reasoning_effort": "medium",
 698      "supports_attachments": true
 699    },
 700    {
 701      "id": "openai/gpt-5.1-thinking",
 702      "name": "GPT 5.1 Thinking",
 703      "cost_per_1m_in": 1.25,
 704      "cost_per_1m_out": 10,
 705      "cost_per_1m_in_cached": 0.125,
 706      "cost_per_1m_out_cached": 0,
 707      "context_window": 400000,
 708      "default_max_tokens": 8000,
 709      "can_reason": true,
 710      "reasoning_levels": [
 711        "low",
 712        "medium",
 713        "high"
 714      ],
 715      "default_reasoning_effort": "medium",
 716      "supports_attachments": true
 717    },
 718    {
 719      "id": "openai/gpt-5.2",
 720      "name": "GPT 5.2",
 721      "cost_per_1m_in": 1.75,
 722      "cost_per_1m_out": 14,
 723      "cost_per_1m_in_cached": 0.175,
 724      "cost_per_1m_out_cached": 0,
 725      "context_window": 400000,
 726      "default_max_tokens": 8000,
 727      "can_reason": true,
 728      "reasoning_levels": [
 729        "low",
 730        "medium",
 731        "high"
 732      ],
 733      "default_reasoning_effort": "medium",
 734      "supports_attachments": true
 735    },
 736    {
 737      "id": "openai/gpt-5.2-pro",
  738      "name": "GPT 5.2 Pro",
 739      "cost_per_1m_in": 21,
 740      "cost_per_1m_out": 168,
 741      "cost_per_1m_in_cached": 0,
 742      "cost_per_1m_out_cached": 0,
 743      "context_window": 400000,
 744      "default_max_tokens": 8000,
 745      "can_reason": true,
 746      "reasoning_levels": [
 747        "low",
 748        "medium",
 749        "high"
 750      ],
 751      "default_reasoning_effort": "medium",
 752      "supports_attachments": true
 753    },
 754    {
 755      "id": "openai/gpt-5.2-chat",
 756      "name": "GPT 5.2 Chat",
 757      "cost_per_1m_in": 1.75,
 758      "cost_per_1m_out": 14,
 759      "cost_per_1m_in_cached": 0.175,
 760      "cost_per_1m_out_cached": 0,
 761      "context_window": 128000,
 762      "default_max_tokens": 8000,
 763      "can_reason": true,
 764      "reasoning_levels": [
 765        "low",
 766        "medium",
 767        "high"
 768      ],
 769      "default_reasoning_effort": "medium",
 770      "supports_attachments": true
 771    },
 772    {
 773      "id": "openai/gpt-5.2-codex",
 774      "name": "GPT 5.2 Codex",
 775      "cost_per_1m_in": 1.75,
 776      "cost_per_1m_out": 14,
 777      "cost_per_1m_in_cached": 0.175,
 778      "cost_per_1m_out_cached": 0,
 779      "context_window": 400000,
 780      "default_max_tokens": 8000,
 781      "can_reason": true,
 782      "reasoning_levels": [
 783        "low",
 784        "medium",
 785        "high"
 786      ],
 787      "default_reasoning_effort": "medium",
 788      "supports_attachments": true
 789    },
 790    {
 791      "id": "openai/gpt-5.3-codex",
 792      "name": "GPT 5.3 Codex",
 793      "cost_per_1m_in": 1.75,
 794      "cost_per_1m_out": 14,
 795      "cost_per_1m_in_cached": 0.175,
 796      "cost_per_1m_out_cached": 0,
 797      "context_window": 400000,
 798      "default_max_tokens": 8000,
 799      "can_reason": true,
 800      "reasoning_levels": [
 801        "low",
 802        "medium",
 803        "high"
 804      ],
 805      "default_reasoning_effort": "medium",
 806      "supports_attachments": true
 807    },
 808    {
 809      "id": "openai/gpt-5.4",
 810      "name": "GPT 5.4",
 811      "cost_per_1m_in": 2.5,
 812      "cost_per_1m_out": 15,
 813      "cost_per_1m_in_cached": 0.25,
 814      "cost_per_1m_out_cached": 0,
 815      "context_window": 1050000,
 816      "default_max_tokens": 8000,
 817      "can_reason": true,
 818      "reasoning_levels": [
 819        "low",
 820        "medium",
 821        "high"
 822      ],
 823      "default_reasoning_effort": "medium",
 824      "supports_attachments": true
 825    },
 826    {
 827      "id": "openai/gpt-5.4-mini",
 828      "name": "GPT 5.4 Mini",
 829      "cost_per_1m_in": 0.75,
 830      "cost_per_1m_out": 4.5,
 831      "cost_per_1m_in_cached": 0.075,
 832      "cost_per_1m_out_cached": 0,
 833      "context_window": 400000,
 834      "default_max_tokens": 8000,
 835      "can_reason": true,
 836      "reasoning_levels": [
 837        "low",
 838        "medium",
 839        "high"
 840      ],
 841      "default_reasoning_effort": "medium",
 842      "supports_attachments": true
 843    },
 844    {
 845      "id": "openai/gpt-5.4-nano",
 846      "name": "GPT 5.4 Nano",
 847      "cost_per_1m_in": 0.2,
 848      "cost_per_1m_out": 1.25,
 849      "cost_per_1m_in_cached": 0.02,
 850      "cost_per_1m_out_cached": 0,
 851      "context_window": 400000,
 852      "default_max_tokens": 8000,
 853      "can_reason": true,
 854      "reasoning_levels": [
 855        "low",
 856        "medium",
 857        "high"
 858      ],
 859      "default_reasoning_effort": "medium",
 860      "supports_attachments": true
 861    },
 862    {
 863      "id": "openai/gpt-5.4-pro",
 864      "name": "GPT 5.4 Pro",
 865      "cost_per_1m_in": 30,
 866      "cost_per_1m_out": 180,
 867      "cost_per_1m_in_cached": 0,
 868      "cost_per_1m_out_cached": 0,
 869      "context_window": 1050000,
 870      "default_max_tokens": 8000,
 871      "can_reason": true,
 872      "reasoning_levels": [
 873        "low",
 874        "medium",
 875        "high"
 876      ],
 877      "default_reasoning_effort": "medium",
 878      "supports_attachments": true
 879    },
 880    {
 881      "id": "openai/gpt-5.5",
 882      "name": "GPT 5.5",
 883      "cost_per_1m_in": 5,
 884      "cost_per_1m_out": 30,
 885      "cost_per_1m_in_cached": 0.5,
 886      "cost_per_1m_out_cached": 0,
 887      "context_window": 1000000,
 888      "default_max_tokens": 8000,
 889      "can_reason": true,
 890      "reasoning_levels": [
 891        "low",
 892        "medium",
 893        "high"
 894      ],
 895      "default_reasoning_effort": "medium",
 896      "supports_attachments": true
 897    },
 898    {
 899      "id": "openai/gpt-5.5-pro",
 900      "name": "GPT 5.5 Pro",
 901      "cost_per_1m_in": 30,
 902      "cost_per_1m_out": 180,
 903      "cost_per_1m_in_cached": 0,
 904      "cost_per_1m_out_cached": 0,
 905      "context_window": 1000000,
 906      "default_max_tokens": 8000,
 907      "can_reason": true,
 908      "reasoning_levels": [
 909        "low",
 910        "medium",
 911        "high"
 912      ],
 913      "default_reasoning_effort": "medium",
 914      "supports_attachments": true
 915    },
 916    {
 917      "id": "openai/gpt-oss-20b",
  918      "name": "GPT OSS 20B",
 919      "cost_per_1m_in": 0.05,
 920      "cost_per_1m_out": 0.2,
 921      "cost_per_1m_in_cached": 0,
 922      "cost_per_1m_out_cached": 0,
 923      "context_window": 131072,
 924      "default_max_tokens": 8000,
 925      "can_reason": true,
 926      "reasoning_levels": [
 927        "low",
 928        "medium",
 929        "high"
 930      ],
 931      "default_reasoning_effort": "medium",
 932      "supports_attachments": false
 933    },
 934    {
 935      "id": "openai/gpt-oss-safeguard-20b",
 936      "name": "GPT OSS Safeguard 20B",
 937      "cost_per_1m_in": 0.075,
 938      "cost_per_1m_out": 0.3,
 939      "cost_per_1m_in_cached": 0.037,
 940      "cost_per_1m_out_cached": 0,
 941      "context_window": 131072,
 942      "default_max_tokens": 8000,
 943      "can_reason": true,
 944      "reasoning_levels": [
 945        "low",
 946        "medium",
 947        "high"
 948      ],
 949      "default_reasoning_effort": "medium",
 950      "supports_attachments": false
 951    },
 952    {
 953      "id": "openai/gpt-4-turbo",
 954      "name": "GPT-4 Turbo",
 955      "cost_per_1m_in": 10,
 956      "cost_per_1m_out": 30,
 957      "cost_per_1m_in_cached": 0,
 958      "cost_per_1m_out_cached": 0,
 959      "context_window": 128000,
 960      "default_max_tokens": 4096,
 961      "can_reason": false,
 962      "supports_attachments": true
 963    },
 964    {
 965      "id": "openai/gpt-4.1",
 966      "name": "GPT-4.1",
 967      "cost_per_1m_in": 2,
 968      "cost_per_1m_out": 8,
 969      "cost_per_1m_in_cached": 0.5,
 970      "cost_per_1m_out_cached": 0,
 971      "context_window": 1047576,
 972      "default_max_tokens": 8000,
 973      "can_reason": false,
 974      "supports_attachments": true
 975    },
 976    {
 977      "id": "openai/gpt-4.1-mini",
 978      "name": "GPT-4.1 mini",
 979      "cost_per_1m_in": 0.4,
 980      "cost_per_1m_out": 1.6,
 981      "cost_per_1m_in_cached": 0.1,
 982      "cost_per_1m_out_cached": 0,
 983      "context_window": 1047576,
 984      "default_max_tokens": 8000,
 985      "can_reason": false,
 986      "supports_attachments": true
 987    },
 988    {
 989      "id": "openai/gpt-4.1-nano",
 990      "name": "GPT-4.1 nano",
 991      "cost_per_1m_in": 0.1,
 992      "cost_per_1m_out": 0.4,
 993      "cost_per_1m_in_cached": 0.025,
 994      "cost_per_1m_out_cached": 0,
 995      "context_window": 1047576,
 996      "default_max_tokens": 8000,
 997      "can_reason": false,
 998      "supports_attachments": true
 999    },
1000    {
1001      "id": "openai/gpt-4o",
1002      "name": "GPT-4o",
1003      "cost_per_1m_in": 2.5,
1004      "cost_per_1m_out": 10,
1005      "cost_per_1m_in_cached": 1.25,
1006      "cost_per_1m_out_cached": 0,
1007      "context_window": 128000,
1008      "default_max_tokens": 8000,
1009      "can_reason": false,
1010      "supports_attachments": true
1011    },
1012    {
1013      "id": "openai/gpt-4o-mini",
1014      "name": "GPT-4o mini",
1015      "cost_per_1m_in": 0.15,
1016      "cost_per_1m_out": 0.6,
1017      "cost_per_1m_in_cached": 0.075,
1018      "cost_per_1m_out_cached": 0,
1019      "context_window": 128000,
1020      "default_max_tokens": 8000,
1021      "can_reason": false,
1022      "supports_attachments": true
1023    },
1024    {
1025      "id": "openai/gpt-5",
1026      "name": "GPT-5",
1027      "cost_per_1m_in": 1.25,
1028      "cost_per_1m_out": 10,
1029      "cost_per_1m_in_cached": 0.125,
1030      "cost_per_1m_out_cached": 0,
1031      "context_window": 400000,
1032      "default_max_tokens": 8000,
1033      "can_reason": true,
1034      "reasoning_levels": [
1035        "low",
1036        "medium",
1037        "high"
1038      ],
1039      "default_reasoning_effort": "medium",
1040      "supports_attachments": true
1041    },
1042    {
1043      "id": "openai/gpt-5-mini",
1044      "name": "GPT-5 mini",
1045      "cost_per_1m_in": 0.25,
1046      "cost_per_1m_out": 2,
1047      "cost_per_1m_in_cached": 0.025,
1048      "cost_per_1m_out_cached": 0,
1049      "context_window": 400000,
1050      "default_max_tokens": 8000,
1051      "can_reason": true,
1052      "reasoning_levels": [
1053        "low",
1054        "medium",
1055        "high"
1056      ],
1057      "default_reasoning_effort": "medium",
1058      "supports_attachments": true
1059    },
1060    {
1061      "id": "openai/gpt-5-nano",
1062      "name": "GPT-5 nano",
1063      "cost_per_1m_in": 0.05,
1064      "cost_per_1m_out": 0.4,
1065      "cost_per_1m_in_cached": 0.005,
1066      "cost_per_1m_out_cached": 0,
1067      "context_window": 400000,
1068      "default_max_tokens": 8000,
1069      "can_reason": true,
1070      "reasoning_levels": [
1071        "low",
1072        "medium",
1073        "high"
1074      ],
1075      "default_reasoning_effort": "medium",
1076      "supports_attachments": true
1077    },
1078    {
1079      "id": "openai/gpt-5-pro",
1080      "name": "GPT-5 pro",
1081      "cost_per_1m_in": 15,
1082      "cost_per_1m_out": 120,
1083      "cost_per_1m_in_cached": 0,
1084      "cost_per_1m_out_cached": 0,
1085      "context_window": 400000,
1086      "default_max_tokens": 8000,
1087      "can_reason": true,
1088      "reasoning_levels": [
1089        "low",
1090        "medium",
1091        "high"
1092      ],
1093      "default_reasoning_effort": "medium",
1094      "supports_attachments": true
1095    },
1096    {
1097      "id": "openai/gpt-5-codex",
1098      "name": "GPT-5-Codex",
1099      "cost_per_1m_in": 1.25,
1100      "cost_per_1m_out": 10,
1101      "cost_per_1m_in_cached": 0.125,
1102      "cost_per_1m_out_cached": 0,
1103      "context_window": 400000,
1104      "default_max_tokens": 8000,
1105      "can_reason": true,
1106      "reasoning_levels": [
1107        "low",
1108        "medium",
1109        "high"
1110      ],
1111      "default_reasoning_effort": "medium",
1112      "supports_attachments": false
1113    },
1114    {
1115      "id": "openai/gpt-5.1-instant",
1116      "name": "GPT-5.1 Instant",
1117      "cost_per_1m_in": 1.25,
1118      "cost_per_1m_out": 10,
1119      "cost_per_1m_in_cached": 0.125,
1120      "cost_per_1m_out_cached": 0,
1121      "context_window": 128000,
1122      "default_max_tokens": 8000,
1123      "can_reason": true,
1124      "reasoning_levels": [
1125        "low",
1126        "medium",
1127        "high"
1128      ],
1129      "default_reasoning_effort": "medium",
1130      "supports_attachments": true
1131    },
1132    {
1133      "id": "openai/gpt-5.1-codex",
1134      "name": "GPT-5.1-Codex",
1135      "cost_per_1m_in": 1.25,
1136      "cost_per_1m_out": 10,
1137      "cost_per_1m_in_cached": 0.125,
1138      "cost_per_1m_out_cached": 0,
1139      "context_window": 400000,
1140      "default_max_tokens": 8000,
1141      "can_reason": true,
1142      "reasoning_levels": [
1143        "low",
1144        "medium",
1145        "high"
1146      ],
1147      "default_reasoning_effort": "medium",
1148      "supports_attachments": true
1149    },
1150    {
1151      "id": "openai/gpt-5.3-chat",
1152      "name": "GPT-5.3 Chat",
1153      "cost_per_1m_in": 1.75,
1154      "cost_per_1m_out": 14,
1155      "cost_per_1m_in_cached": 0.175,
1156      "cost_per_1m_out_cached": 0,
1157      "context_window": 128000,
1158      "default_max_tokens": 8000,
1159      "can_reason": true,
1160      "reasoning_levels": [
1161        "low",
1162        "medium",
1163        "high"
1164      ],
1165      "default_reasoning_effort": "medium",
1166      "supports_attachments": true
1167    },
1168    {
1169      "id": "google/gemini-2.0-flash",
1170      "name": "Gemini 2.0 Flash",
1171      "cost_per_1m_in": 0.15,
1172      "cost_per_1m_out": 0.6,
1173      "cost_per_1m_in_cached": 0.025,
1174      "cost_per_1m_out_cached": 0,
1175      "context_window": 1048576,
1176      "default_max_tokens": 8000,
1177      "can_reason": false,
1178      "supports_attachments": true
1179    },
1180    {
1181      "id": "google/gemini-2.0-flash-lite",
1182      "name": "Gemini 2.0 Flash Lite",
1183      "cost_per_1m_in": 0.075,
1184      "cost_per_1m_out": 0.3,
1185      "cost_per_1m_in_cached": 0.02,
1186      "cost_per_1m_out_cached": 0,
1187      "context_window": 1048576,
1188      "default_max_tokens": 8000,
1189      "can_reason": false,
1190      "supports_attachments": true
1191    },
1192    {
1193      "id": "google/gemini-2.5-flash",
1194      "name": "Gemini 2.5 Flash",
1195      "cost_per_1m_in": 0.3,
1196      "cost_per_1m_out": 2.5,
1197      "cost_per_1m_in_cached": 0.03,
1198      "cost_per_1m_out_cached": 0,
1199      "context_window": 1000000,
1200      "default_max_tokens": 8000,
1201      "can_reason": true,
1202      "reasoning_levels": [
1203        "low",
1204        "medium",
1205        "high"
1206      ],
1207      "default_reasoning_effort": "medium",
1208      "supports_attachments": true
1209    },
1210    {
1211      "id": "google/gemini-2.5-flash-lite",
1212      "name": "Gemini 2.5 Flash Lite",
1213      "cost_per_1m_in": 0.1,
1214      "cost_per_1m_out": 0.4,
1215      "cost_per_1m_in_cached": 0.01,
1216      "cost_per_1m_out_cached": 0,
1217      "context_window": 1048576,
1218      "default_max_tokens": 8000,
1219      "can_reason": true,
1220      "reasoning_levels": [
1221        "low",
1222        "medium",
1223        "high"
1224      ],
1225      "default_reasoning_effort": "medium",
1226      "supports_attachments": true
1227    },
1228    {
1229      "id": "google/gemini-2.5-pro",
1230      "name": "Gemini 2.5 Pro",
1231      "cost_per_1m_in": 1.25,
1232      "cost_per_1m_out": 10,
1233      "cost_per_1m_in_cached": 0.125,
1234      "cost_per_1m_out_cached": 0,
1235      "context_window": 1048576,
1236      "default_max_tokens": 8000,
1237      "can_reason": true,
1238      "reasoning_levels": [
1239        "low",
1240        "medium",
1241        "high"
1242      ],
1243      "default_reasoning_effort": "medium",
1244      "supports_attachments": true
1245    },
1246    {
1247      "id": "google/gemini-3-flash",
1248      "name": "Gemini 3 Flash",
1249      "cost_per_1m_in": 0.5,
1250      "cost_per_1m_out": 3,
1251      "cost_per_1m_in_cached": 0.05,
1252      "cost_per_1m_out_cached": 0,
1253      "context_window": 1000000,
1254      "default_max_tokens": 8000,
1255      "can_reason": true,
1256      "reasoning_levels": [
1257        "low",
1258        "medium",
1259        "high"
1260      ],
1261      "default_reasoning_effort": "medium",
1262      "supports_attachments": true
1263    },
1264    {
1265      "id": "google/gemini-3-pro-preview",
1266      "name": "Gemini 3 Pro Preview",
1267      "cost_per_1m_in": 2,
1268      "cost_per_1m_out": 12,
1269      "cost_per_1m_in_cached": 0.2,
1270      "cost_per_1m_out_cached": 0,
1271      "context_window": 1000000,
1272      "default_max_tokens": 8000,
1273      "can_reason": true,
1274      "reasoning_levels": [
1275        "low",
1276        "medium",
1277        "high"
1278      ],
1279      "default_reasoning_effort": "medium",
1280      "supports_attachments": true
1281    },
1282    {
1283      "id": "google/gemini-3.1-flash-lite",
1284      "name": "Gemini 3.1 Flash Lite",
1285      "cost_per_1m_in": 0.25,
1286      "cost_per_1m_out": 1.5,
1287      "cost_per_1m_in_cached": 0.03,
1288      "cost_per_1m_out_cached": 0,
1289      "context_window": 1000000,
1290      "default_max_tokens": 8000,
1291      "can_reason": true,
1292      "reasoning_levels": [
1293        "low",
1294        "medium",
1295        "high"
1296      ],
1297      "default_reasoning_effort": "medium",
1298      "supports_attachments": true
1299    },
1300    {
1301      "id": "google/gemini-3.1-flash-lite-preview",
1302      "name": "Gemini 3.1 Flash Lite Preview",
1303      "cost_per_1m_in": 0.25,
1304      "cost_per_1m_out": 1.5,
1305      "cost_per_1m_in_cached": 0.03,
1306      "cost_per_1m_out_cached": 0,
1307      "context_window": 1000000,
1308      "default_max_tokens": 8000,
1309      "can_reason": true,
1310      "reasoning_levels": [
1311        "low",
1312        "medium",
1313        "high"
1314      ],
1315      "default_reasoning_effort": "medium",
1316      "supports_attachments": true
1317    },
1318    {
1319      "id": "google/gemini-3.1-pro-preview",
1320      "name": "Gemini 3.1 Pro Preview",
1321      "cost_per_1m_in": 2,
1322      "cost_per_1m_out": 12,
1323      "cost_per_1m_in_cached": 0.2,
1324      "cost_per_1m_out_cached": 0,
1325      "context_window": 1000000,
1326      "default_max_tokens": 8000,
1327      "can_reason": true,
1328      "reasoning_levels": [
1329        "low",
1330        "medium",
1331        "high"
1332      ],
1333      "default_reasoning_effort": "medium",
1334      "supports_attachments": true
1335    },
1336    {
1337      "id": "google/gemma-4-26b-a4b-it",
1338      "name": "Gemma 4 26B A4B IT",
1339      "cost_per_1m_in": 0.13,
1340      "cost_per_1m_out": 0.4,
1341      "cost_per_1m_in_cached": 0,
1342      "cost_per_1m_out_cached": 0,
1343      "context_window": 262144,
1344      "default_max_tokens": 8000,
1345      "can_reason": false,
1346      "supports_attachments": true
1347    },
1348    {
1349      "id": "google/gemma-4-31b-it",
1350      "name": "Gemma 4 31B IT",
1351      "cost_per_1m_in": 0.14,
1352      "cost_per_1m_out": 0.4,
1353      "cost_per_1m_in_cached": 0,
1354      "cost_per_1m_out_cached": 0,
1355      "context_window": 262144,
1356      "default_max_tokens": 8000,
1357      "can_reason": false,
1358      "supports_attachments": true
1359    },
1360    {
1361      "id": "xai/grok-3",
1362      "name": "Grok 3 Beta",
1363      "cost_per_1m_in": 3,
1364      "cost_per_1m_out": 15,
1365      "cost_per_1m_in_cached": 0.75,
1366      "cost_per_1m_out_cached": 0,
1367      "context_window": 131072,
1368      "default_max_tokens": 8000,
1369      "can_reason": false,
1370      "supports_attachments": false
1371    },
1372    {
1373      "id": "xai/grok-3-fast",
1374      "name": "Grok 3 Fast Beta",
1375      "cost_per_1m_in": 5,
1376      "cost_per_1m_out": 25,
1377      "cost_per_1m_in_cached": 1.25,
1378      "cost_per_1m_out_cached": 0,
1379      "context_window": 131072,
1380      "default_max_tokens": 8000,
1381      "can_reason": false,
1382      "supports_attachments": false
1383    },
1384    {
1385      "id": "xai/grok-3-mini",
1386      "name": "Grok 3 Mini Beta",
1387      "cost_per_1m_in": 0.3,
1388      "cost_per_1m_out": 0.5,
1389      "cost_per_1m_in_cached": 0.075,
1390      "cost_per_1m_out_cached": 0,
1391      "context_window": 131072,
1392      "default_max_tokens": 8000,
1393      "can_reason": false,
1394      "supports_attachments": false
1395    },
1396    {
1397      "id": "xai/grok-3-mini-fast",
1398      "name": "Grok 3 Mini Fast Beta",
1399      "cost_per_1m_in": 0.6,
1400      "cost_per_1m_out": 4,
1401      "cost_per_1m_in_cached": 0,
1402      "cost_per_1m_out_cached": 0,
1403      "context_window": 131072,
1404      "default_max_tokens": 8000,
1405      "can_reason": false,
1406      "supports_attachments": false
1407    },
1408    {
1409      "id": "xai/grok-4",
1410      "name": "Grok 4",
1411      "cost_per_1m_in": 3,
1412      "cost_per_1m_out": 15,
1413      "cost_per_1m_in_cached": 0.75,
1414      "cost_per_1m_out_cached": 0,
1415      "context_window": 256000,
1416      "default_max_tokens": 8000,
1417      "can_reason": true,
1418      "reasoning_levels": [
1419        "low",
1420        "medium",
1421        "high"
1422      ],
1423      "default_reasoning_effort": "medium",
1424      "supports_attachments": true
1425    },
1426    {
1427      "id": "xai/grok-4-fast-non-reasoning",
1428      "name": "Grok 4 Fast Non-Reasoning",
1429      "cost_per_1m_in": 0.2,
1430      "cost_per_1m_out": 0.5,
1431      "cost_per_1m_in_cached": 0.05,
1432      "cost_per_1m_out_cached": 0,
1433      "context_window": 2000000,
1434      "default_max_tokens": 8000,
1435      "can_reason": false,
1436      "supports_attachments": true
1437    },
1438    {
1439      "id": "xai/grok-4-fast-reasoning",
1440      "name": "Grok 4 Fast Reasoning",
1441      "cost_per_1m_in": 0.2,
1442      "cost_per_1m_out": 0.5,
1443      "cost_per_1m_in_cached": 0.05,
1444      "cost_per_1m_out_cached": 0,
1445      "context_window": 2000000,
1446      "default_max_tokens": 8000,
1447      "can_reason": true,
1448      "reasoning_levels": [
1449        "low",
1450        "medium",
1451        "high"
1452      ],
1453      "default_reasoning_effort": "medium",
1454      "supports_attachments": true
1455    },
1456    {
1457      "id": "xai/grok-4.1-fast-non-reasoning",
1458      "name": "Grok 4.1 Fast Non-Reasoning",
1459      "cost_per_1m_in": 0.2,
1460      "cost_per_1m_out": 0.5,
1461      "cost_per_1m_in_cached": 0.05,
1462      "cost_per_1m_out_cached": 0,
1463      "context_window": 2000000,
1464      "default_max_tokens": 8000,
1465      "can_reason": false,
1466      "supports_attachments": true
1467    },
1468    {
1469      "id": "xai/grok-4.1-fast-reasoning",
1470      "name": "Grok 4.1 Fast Reasoning",
1471      "cost_per_1m_in": 0.2,
1472      "cost_per_1m_out": 0.5,
1473      "cost_per_1m_in_cached": 0.05,
1474      "cost_per_1m_out_cached": 0,
1475      "context_window": 2000000,
1476      "default_max_tokens": 8000,
1477      "can_reason": true,
1478      "reasoning_levels": [
1479        "low",
1480        "medium",
1481        "high"
1482      ],
1483      "default_reasoning_effort": "medium",
1484      "supports_attachments": true
1485    },
1486    {
1487      "id": "xai/grok-4.20-non-reasoning-beta",
1488      "name": "Grok 4.20 Beta Non-Reasoning",
1489      "cost_per_1m_in": 1.25,
1490      "cost_per_1m_out": 2.5,
1491      "cost_per_1m_in_cached": 0.2,
1492      "cost_per_1m_out_cached": 0,
1493      "context_window": 2000000,
1494      "default_max_tokens": 8000,
1495      "can_reason": false,
1496      "supports_attachments": true
1497    },
1498    {
1499      "id": "xai/grok-4.20-reasoning-beta",
1500      "name": "Grok 4.20 Beta Reasoning",
1501      "cost_per_1m_in": 1.25,
1502      "cost_per_1m_out": 2.5,
1503      "cost_per_1m_in_cached": 0.2,
1504      "cost_per_1m_out_cached": 0,
1505      "context_window": 2000000,
1506      "default_max_tokens": 8000,
1507      "can_reason": true,
1508      "reasoning_levels": [
1509        "low",
1510        "medium",
1511        "high"
1512      ],
1513      "default_reasoning_effort": "medium",
1514      "supports_attachments": true
1515    },
1516    {
1517      "id": "xai/grok-4.20-multi-agent-beta",
1518      "name": "Grok 4.20 Multi Agent Beta",
1519      "cost_per_1m_in": 1.25,
1520      "cost_per_1m_out": 2.5,
1521      "cost_per_1m_in_cached": 0.2,
1522      "cost_per_1m_out_cached": 0,
1523      "context_window": 2000000,
1524      "default_max_tokens": 8000,
1525      "can_reason": true,
1526      "reasoning_levels": [
1527        "low",
1528        "medium",
1529        "high"
1530      ],
1531      "default_reasoning_effort": "medium",
1532      "supports_attachments": true
1533    },
1534    {
1535      "id": "xai/grok-4.20-multi-agent",
1536      "name": "Grok 4.20 Multi-Agent",
1537      "cost_per_1m_in": 1.25,
1538      "cost_per_1m_out": 2.5,
1539      "cost_per_1m_in_cached": 0.2,
1540      "cost_per_1m_out_cached": 0,
1541      "context_window": 2000000,
1542      "default_max_tokens": 8000,
1543      "can_reason": true,
1544      "reasoning_levels": [
1545        "low",
1546        "medium",
1547        "high"
1548      ],
1549      "default_reasoning_effort": "medium",
1550      "supports_attachments": true
1551    },
1552    {
1553      "id": "xai/grok-4.20-non-reasoning",
1554      "name": "Grok 4.20 Non-Reasoning",
1555      "cost_per_1m_in": 1.25,
1556      "cost_per_1m_out": 2.5,
1557      "cost_per_1m_in_cached": 0.2,
1558      "cost_per_1m_out_cached": 0,
1559      "context_window": 2000000,
1560      "default_max_tokens": 8000,
1561      "can_reason": false,
1562      "supports_attachments": true
1563    },
1564    {
1565      "id": "xai/grok-4.20-reasoning",
1566      "name": "Grok 4.20 Reasoning",
1567      "cost_per_1m_in": 1.25,
1568      "cost_per_1m_out": 2.5,
1569      "cost_per_1m_in_cached": 0.2,
1570      "cost_per_1m_out_cached": 0,
1571      "context_window": 2000000,
1572      "default_max_tokens": 8000,
1573      "can_reason": true,
1574      "reasoning_levels": [
1575        "low",
1576        "medium",
1577        "high"
1578      ],
1579      "default_reasoning_effort": "medium",
1580      "supports_attachments": true
1581    },
1582    {
1583      "id": "xai/grok-4.3",
1584      "name": "Grok 4.3",
1585      "cost_per_1m_in": 1.25,
1586      "cost_per_1m_out": 2.5,
1587      "cost_per_1m_in_cached": 0.2,
1588      "cost_per_1m_out_cached": 0,
1589      "context_window": 1000000,
1590      "default_max_tokens": 8000,
1591      "can_reason": true,
1592      "reasoning_levels": [
1593        "low",
1594        "medium",
1595        "high"
1596      ],
1597      "default_reasoning_effort": "medium",
1598      "supports_attachments": true
1599    },
1600    {
1601      "id": "xai/grok-code-fast-1",
1602      "name": "Grok Code Fast 1",
1603      "cost_per_1m_in": 0.2,
1604      "cost_per_1m_out": 1.5,
1605      "cost_per_1m_in_cached": 0.02,
1606      "cost_per_1m_out_cached": 0,
1607      "context_window": 256000,
1608      "default_max_tokens": 8000,
1609      "can_reason": true,
1610      "reasoning_levels": [
1611        "low",
1612        "medium",
1613        "high"
1614      ],
1615      "default_reasoning_effort": "medium",
1616      "supports_attachments": false
1617    },
1618    {
1619      "id": "kwaipilot/kat-coder-pro-v2",
1620      "name": "Kat Coder Pro V2",
1621      "cost_per_1m_in": 0.3,
1622      "cost_per_1m_out": 1.2,
1623      "cost_per_1m_in_cached": 0.06,
1624      "cost_per_1m_out_cached": 0,
1625      "context_window": 256000,
1626      "default_max_tokens": 8000,
1627      "can_reason": true,
1628      "reasoning_levels": [
1629        "low",
1630        "medium",
1631        "high"
1632      ],
1633      "default_reasoning_effort": "medium",
1634      "supports_attachments": false
1635    },
1636    {
1637      "id": "moonshotai/kimi-k2",
1638      "name": "Kimi K2 Instruct",
1639      "cost_per_1m_in": 0.57,
1640      "cost_per_1m_out": 2.3,
1641      "cost_per_1m_in_cached": 0,
1642      "cost_per_1m_out_cached": 0,
1643      "context_window": 131072,
1644      "default_max_tokens": 8000,
1645      "can_reason": false,
1646      "supports_attachments": false
1647    },
1648    {
1649      "id": "moonshotai/kimi-k2-thinking",
1650      "name": "Kimi K2 Thinking",
1651      "cost_per_1m_in": 0.6,
1652      "cost_per_1m_out": 2.5,
1653      "cost_per_1m_in_cached": 0.15,
1654      "cost_per_1m_out_cached": 0,
 1655      "context_window": 262144,
1656      "default_max_tokens": 8000,
1657      "can_reason": true,
1658      "reasoning_levels": [
1659        "low",
1660        "medium",
1661        "high"
1662      ],
1663      "default_reasoning_effort": "medium",
1664      "supports_attachments": false
1665    },
1666    {
1667      "id": "moonshotai/kimi-k2-thinking-turbo",
1668      "name": "Kimi K2 Thinking Turbo",
1669      "cost_per_1m_in": 1.15,
1670      "cost_per_1m_out": 8,
1671      "cost_per_1m_in_cached": 0.15,
1672      "cost_per_1m_out_cached": 0,
 1673      "context_window": 262144,
1674      "default_max_tokens": 8000,
1675      "can_reason": true,
1676      "reasoning_levels": [
1677        "low",
1678        "medium",
1679        "high"
1680      ],
1681      "default_reasoning_effort": "medium",
1682      "supports_attachments": false
1683    },
1684    {
1685      "id": "moonshotai/kimi-k2-turbo",
1686      "name": "Kimi K2 Turbo",
1687      "cost_per_1m_in": 1.15,
1688      "cost_per_1m_out": 8,
1689      "cost_per_1m_in_cached": 0.15,
1690      "cost_per_1m_out_cached": 0,
1691      "context_window": 256000,
1692      "default_max_tokens": 8000,
1693      "can_reason": false,
1694      "supports_attachments": false
1695    },
1696    {
1697      "id": "moonshotai/kimi-k2.5",
1698      "name": "Kimi K2.5",
1699      "cost_per_1m_in": 0.6,
1700      "cost_per_1m_out": 3,
1701      "cost_per_1m_in_cached": 0.1,
1702      "cost_per_1m_out_cached": 0,
 1703      "context_window": 262144,
1704      "default_max_tokens": 8000,
1705      "can_reason": true,
1706      "reasoning_levels": [
1707        "low",
1708        "medium",
1709        "high"
1710      ],
1711      "default_reasoning_effort": "medium",
1712      "supports_attachments": true
1713    },
1714    {
1715      "id": "moonshotai/kimi-k2.6",
1716      "name": "Kimi K2.6",
1717      "cost_per_1m_in": 0.95,
1718      "cost_per_1m_out": 4,
1719      "cost_per_1m_in_cached": 0.16,
1720      "cost_per_1m_out_cached": 0,
1721      "context_window": 262000,
1722      "default_max_tokens": 8000,
1723      "can_reason": true,
1724      "reasoning_levels": [
1725        "low",
1726        "medium",
1727        "high"
1728      ],
1729      "default_reasoning_effort": "medium",
1730      "supports_attachments": true
1731    },
1732    {
1733      "id": "meta/llama-3.1-70b",
1734      "name": "Llama 3.1 70B Instruct",
1735      "cost_per_1m_in": 0.72,
1736      "cost_per_1m_out": 0.72,
1737      "cost_per_1m_in_cached": 0,
1738      "cost_per_1m_out_cached": 0,
1739      "context_window": 128000,
1740      "default_max_tokens": 8000,
1741      "can_reason": false,
1742      "supports_attachments": false
1743    },
1744    {
1745      "id": "meta/llama-3.1-8b",
1746      "name": "Llama 3.1 8B Instruct",
1747      "cost_per_1m_in": 0.22,
1748      "cost_per_1m_out": 0.22,
1749      "cost_per_1m_in_cached": 0,
1750      "cost_per_1m_out_cached": 0,
1751      "context_window": 128000,
1752      "default_max_tokens": 8000,
1753      "can_reason": false,
1754      "supports_attachments": false
1755    },
1756    {
1757      "id": "meta/llama-3.2-11b",
1758      "name": "Llama 3.2 11B Vision Instruct",
1759      "cost_per_1m_in": 0.16,
1760      "cost_per_1m_out": 0.16,
1761      "cost_per_1m_in_cached": 0,
1762      "cost_per_1m_out_cached": 0,
1763      "context_window": 128000,
1764      "default_max_tokens": 8000,
1765      "can_reason": false,
1766      "supports_attachments": true
1767    },
1768    {
1769      "id": "meta/llama-3.2-90b",
1770      "name": "Llama 3.2 90B Vision Instruct",
1771      "cost_per_1m_in": 0.72,
1772      "cost_per_1m_out": 0.72,
1773      "cost_per_1m_in_cached": 0,
1774      "cost_per_1m_out_cached": 0,
1775      "context_window": 128000,
1776      "default_max_tokens": 8000,
1777      "can_reason": false,
1778      "supports_attachments": true
1779    },
1780    {
1781      "id": "meta/llama-3.3-70b",
1782      "name": "Llama 3.3 70B Instruct",
1783      "cost_per_1m_in": 0.72,
1784      "cost_per_1m_out": 0.72,
1785      "cost_per_1m_in_cached": 0,
1786      "cost_per_1m_out_cached": 0,
1787      "context_window": 128000,
1788      "default_max_tokens": 8000,
1789      "can_reason": false,
1790      "supports_attachments": false
1791    },
1792    {
1793      "id": "meta/llama-4-maverick",
1794      "name": "Llama 4 Maverick 17B Instruct",
1795      "cost_per_1m_in": 0.24,
1796      "cost_per_1m_out": 0.97,
1797      "cost_per_1m_in_cached": 0,
1798      "cost_per_1m_out_cached": 0,
1799      "context_window": 128000,
1800      "default_max_tokens": 8000,
1801      "can_reason": false,
1802      "supports_attachments": true
1803    },
1804    {
1805      "id": "meta/llama-4-scout",
1806      "name": "Llama 4 Scout 17B Instruct",
1807      "cost_per_1m_in": 0.17,
1808      "cost_per_1m_out": 0.66,
1809      "cost_per_1m_in_cached": 0,
1810      "cost_per_1m_out_cached": 0,
1811      "context_window": 128000,
1812      "default_max_tokens": 8000,
1813      "can_reason": false,
1814      "supports_attachments": true
1815    },
1816    {
1817      "id": "meituan/longcat-flash-chat",
1818      "name": "LongCat Flash Chat",
1819      "cost_per_1m_in": 0,
1820      "cost_per_1m_out": 0,
1821      "cost_per_1m_in_cached": 0,
1822      "cost_per_1m_out_cached": 0,
1823      "context_window": 128000,
1824      "default_max_tokens": 8000,
1825      "can_reason": false,
1826      "supports_attachments": false
1827    },
1828    {
1829      "id": "inception/mercury-2",
1830      "name": "Mercury 2",
1831      "cost_per_1m_in": 0.25,
1832      "cost_per_1m_out": 0.75,
1833      "cost_per_1m_in_cached": 0.025,
1834      "cost_per_1m_out_cached": 0,
1835      "context_window": 128000,
1836      "default_max_tokens": 8000,
1837      "can_reason": true,
1838      "reasoning_levels": [
1839        "low",
1840        "medium",
1841        "high"
1842      ],
1843      "default_reasoning_effort": "medium",
1844      "supports_attachments": false
1845    },
1846    {
1847      "id": "inception/mercury-coder-small",
1848      "name": "Mercury Coder Small Beta",
1849      "cost_per_1m_in": 0.25,
1850      "cost_per_1m_out": 1,
1851      "cost_per_1m_in_cached": 0,
1852      "cost_per_1m_out_cached": 0,
1853      "context_window": 32000,
1854      "default_max_tokens": 8000,
1855      "can_reason": false,
1856      "supports_attachments": false
1857    },
1858    {
1859      "id": "xiaomi/mimo-v2.5",
 1860      "name": "MiMo V2.5",
1861      "cost_per_1m_in": 0.4,
1862      "cost_per_1m_out": 2,
1863      "cost_per_1m_in_cached": 0.08,
1864      "cost_per_1m_out_cached": 0,
1865      "context_window": 1050000,
1866      "default_max_tokens": 8000,
1867      "can_reason": true,
1868      "reasoning_levels": [
1869        "low",
1870        "medium",
1871        "high"
1872      ],
1873      "default_reasoning_effort": "medium",
1874      "supports_attachments": true
1875    },
1876    {
1877      "id": "xiaomi/mimo-v2-flash",
1878      "name": "MiMo V2 Flash",
1879      "cost_per_1m_in": 0.1,
1880      "cost_per_1m_out": 0.3,
1881      "cost_per_1m_in_cached": 0.01,
1882      "cost_per_1m_out_cached": 0,
1883      "context_window": 262144,
1884      "default_max_tokens": 8000,
1885      "can_reason": true,
1886      "reasoning_levels": [
1887        "low",
1888        "medium",
1889        "high"
1890      ],
1891      "default_reasoning_effort": "medium",
1892      "supports_attachments": false
1893    },
1894    {
1895      "id": "xiaomi/mimo-v2-pro",
1896      "name": "MiMo V2 Pro",
1897      "cost_per_1m_in": 1,
1898      "cost_per_1m_out": 3,
1899      "cost_per_1m_in_cached": 0.2,
1900      "cost_per_1m_out_cached": 0,
1901      "context_window": 1000000,
1902      "default_max_tokens": 8000,
1903      "can_reason": true,
1904      "reasoning_levels": [
1905        "low",
1906        "medium",
1907        "high"
1908      ],
1909      "default_reasoning_effort": "medium",
1910      "supports_attachments": false
1911    },
1912    {
1913      "id": "xiaomi/mimo-v2.5-pro",
1914      "name": "MiMo V2.5 Pro",
1915      "cost_per_1m_in": 1,
1916      "cost_per_1m_out": 3,
1917      "cost_per_1m_in_cached": 0.2,
1918      "cost_per_1m_out_cached": 0,
1919      "context_window": 1050000,
1920      "default_max_tokens": 8000,
1921      "can_reason": true,
1922      "reasoning_levels": [
1923        "low",
1924        "medium",
1925        "high"
1926      ],
1927      "default_reasoning_effort": "medium",
1928      "supports_attachments": true
1929    },
1930    {
1931      "id": "minimax/minimax-m2",
1932      "name": "MiniMax M2",
1933      "cost_per_1m_in": 0.3,
1934      "cost_per_1m_out": 1.2,
1935      "cost_per_1m_in_cached": 0.03,
1936      "cost_per_1m_out_cached": 0.375,
1937      "context_window": 205000,
1938      "default_max_tokens": 8000,
1939      "can_reason": true,
1940      "reasoning_levels": [
1941        "low",
1942        "medium",
1943        "high"
1944      ],
1945      "default_reasoning_effort": "medium",
1946      "supports_attachments": false
1947    },
1948    {
1949      "id": "minimax/minimax-m2.1",
1950      "name": "MiniMax M2.1",
1951      "cost_per_1m_in": 0.3,
1952      "cost_per_1m_out": 1.2,
1953      "cost_per_1m_in_cached": 0.03,
1954      "cost_per_1m_out_cached": 0.375,
1955      "context_window": 204800,
1956      "default_max_tokens": 8000,
1957      "can_reason": true,
1958      "reasoning_levels": [
1959        "low",
1960        "medium",
1961        "high"
1962      ],
1963      "default_reasoning_effort": "medium",
1964      "supports_attachments": false
1965    },
1966    {
1967      "id": "minimax/minimax-m2.1-lightning",
1968      "name": "MiniMax M2.1 Lightning",
1969      "cost_per_1m_in": 0.3,
1970      "cost_per_1m_out": 2.4,
1971      "cost_per_1m_in_cached": 0.03,
1972      "cost_per_1m_out_cached": 0.375,
1973      "context_window": 204800,
1974      "default_max_tokens": 8000,
1975      "can_reason": true,
1976      "reasoning_levels": [
1977        "low",
1978        "medium",
1979        "high"
1980      ],
1981      "default_reasoning_effort": "medium",
1982      "supports_attachments": false
1983    },
1984    {
1985      "id": "minimax/minimax-m2.5",
1986      "name": "MiniMax M2.5",
1987      "cost_per_1m_in": 0.3,
1988      "cost_per_1m_out": 1.2,
1989      "cost_per_1m_in_cached": 0.03,
1990      "cost_per_1m_out_cached": 0.375,
1991      "context_window": 204800,
1992      "default_max_tokens": 8000,
1993      "can_reason": true,
1994      "reasoning_levels": [
1995        "low",
1996        "medium",
1997        "high"
1998      ],
1999      "default_reasoning_effort": "medium",
2000      "supports_attachments": false
2001    },
2002    {
2003      "id": "minimax/minimax-m2.5-highspeed",
2004      "name": "MiniMax M2.5 High Speed",
2005      "cost_per_1m_in": 0.6,
2006      "cost_per_1m_out": 2.4,
2007      "cost_per_1m_in_cached": 0.03,
2008      "cost_per_1m_out_cached": 0.375,
2009      "context_window": 204800,
2010      "default_max_tokens": 8000,
2011      "can_reason": true,
2012      "reasoning_levels": [
2013        "low",
2014        "medium",
2015        "high"
2016      ],
2017      "default_reasoning_effort": "medium",
2018      "supports_attachments": false
2019    },
2020    {
2021      "id": "minimax/minimax-m2.7-highspeed",
2022      "name": "MiniMax M2.7 High Speed",
2023      "cost_per_1m_in": 0.6,
2024      "cost_per_1m_out": 2.4,
2025      "cost_per_1m_in_cached": 0.06,
2026      "cost_per_1m_out_cached": 0.375,
2027      "context_window": 204800,
2028      "default_max_tokens": 8000,
2029      "can_reason": true,
2030      "reasoning_levels": [
2031        "low",
2032        "medium",
2033        "high"
2034      ],
2035      "default_reasoning_effort": "medium",
2036      "supports_attachments": true
2037    },
2038    {
2039      "id": "minimax/minimax-m2.7",
 2040      "name": "MiniMax M2.7",
2041      "cost_per_1m_in": 0.3,
2042      "cost_per_1m_out": 1.2,
2043      "cost_per_1m_in_cached": 0.06,
2044      "cost_per_1m_out_cached": 0.375,
2045      "context_window": 204800,
2046      "default_max_tokens": 8000,
2047      "can_reason": true,
2048      "reasoning_levels": [
2049        "low",
2050        "medium",
2051        "high"
2052      ],
2053      "default_reasoning_effort": "medium",
2054      "supports_attachments": true
2055    },
2056    {
2057      "id": "mistral/ministral-3b",
2058      "name": "Ministral 3B",
2059      "cost_per_1m_in": 0.1,
2060      "cost_per_1m_out": 0.1,
2061      "cost_per_1m_in_cached": 0,
2062      "cost_per_1m_out_cached": 0,
2063      "context_window": 128000,
2064      "default_max_tokens": 4000,
2065      "can_reason": false,
2066      "supports_attachments": false
2067    },
2068    {
2069      "id": "mistral/ministral-8b",
2070      "name": "Ministral 8B",
2071      "cost_per_1m_in": 0.15,
2072      "cost_per_1m_out": 0.15,
2073      "cost_per_1m_in_cached": 0,
2074      "cost_per_1m_out_cached": 0,
2075      "context_window": 128000,
2076      "default_max_tokens": 4000,
2077      "can_reason": false,
2078      "supports_attachments": false
2079    },
2080    {
2081      "id": "mistral/codestral",
2082      "name": "Mistral Codestral",
2083      "cost_per_1m_in": 0.3,
2084      "cost_per_1m_out": 0.9,
2085      "cost_per_1m_in_cached": 0,
2086      "cost_per_1m_out_cached": 0,
2087      "context_window": 128000,
2088      "default_max_tokens": 4000,
2089      "can_reason": false,
2090      "supports_attachments": false
2091    },
2092    {
2093      "id": "mistral/mistral-medium",
2094      "name": "Mistral Medium 3.1",
2095      "cost_per_1m_in": 0.4,
2096      "cost_per_1m_out": 2,
2097      "cost_per_1m_in_cached": 0,
2098      "cost_per_1m_out_cached": 0,
2099      "context_window": 128000,
2100      "default_max_tokens": 8000,
2101      "can_reason": false,
2102      "supports_attachments": true
2103    },
2104    {
2105      "id": "mistral/mistral-small",
2106      "name": "Mistral Small",
2107      "cost_per_1m_in": 0.1,
2108      "cost_per_1m_out": 0.3,
2109      "cost_per_1m_in_cached": 0,
2110      "cost_per_1m_out_cached": 0,
2111      "context_window": 32000,
2112      "default_max_tokens": 4000,
2113      "can_reason": false,
2114      "supports_attachments": true
2115    },
2116    {
2117      "id": "nvidia/nemotron-nano-12b-v2-vl",
2118      "name": "Nvidia Nemotron Nano 12B V2 VL",
2119      "cost_per_1m_in": 0.2,
2120      "cost_per_1m_out": 0.6,
2121      "cost_per_1m_in_cached": 0,
2122      "cost_per_1m_out_cached": 0,
2123      "context_window": 131072,
2124      "default_max_tokens": 8000,
2125      "can_reason": true,
2126      "reasoning_levels": [
2127        "low",
2128        "medium",
2129        "high"
2130      ],
2131      "default_reasoning_effort": "medium",
2132      "supports_attachments": true
2133    },
2134    {
2135      "id": "nvidia/nemotron-nano-9b-v2",
2136      "name": "Nvidia Nemotron Nano 9B V2",
2137      "cost_per_1m_in": 0.06,
2138      "cost_per_1m_out": 0.23,
2139      "cost_per_1m_in_cached": 0,
2140      "cost_per_1m_out_cached": 0,
2141      "context_window": 131072,
2142      "default_max_tokens": 8000,
2143      "can_reason": true,
2144      "reasoning_levels": [
2145        "low",
2146        "medium",
2147        "high"
2148      ],
2149      "default_reasoning_effort": "medium",
2150      "supports_attachments": false
2151    },
2152    {
2153      "id": "mistral/pixtral-12b",
2154      "name": "Pixtral 12B 2409",
2155      "cost_per_1m_in": 0.15,
2156      "cost_per_1m_out": 0.15,
2157      "cost_per_1m_in_cached": 0,
2158      "cost_per_1m_out_cached": 0,
2159      "context_window": 128000,
2160      "default_max_tokens": 4000,
2161      "can_reason": false,
2162      "supports_attachments": true
2163    },
2164    {
2165      "id": "mistral/pixtral-large",
2166      "name": "Pixtral Large",
2167      "cost_per_1m_in": 2,
2168      "cost_per_1m_out": 6,
2169      "cost_per_1m_in_cached": 0,
2170      "cost_per_1m_out_cached": 0,
2171      "context_window": 128000,
2172      "default_max_tokens": 4000,
2173      "can_reason": false,
2174      "supports_attachments": true
2175    },
2176    {
2177      "id": "alibaba/qwen-3-32b",
2178      "name": "Qwen 3 32B",
2179      "cost_per_1m_in": 0.16,
2180      "cost_per_1m_out": 0.64,
2181      "cost_per_1m_in_cached": 0,
2182      "cost_per_1m_out_cached": 0,
2183      "context_window": 128000,
2184      "default_max_tokens": 8000,
2185      "can_reason": true,
2186      "reasoning_levels": [
2187        "low",
2188        "medium",
2189        "high"
2190      ],
2191      "default_reasoning_effort": "medium",
2192      "supports_attachments": false
2193    },
2194    {
2195      "id": "alibaba/qwen3-coder-30b-a3b",
2196      "name": "Qwen 3 Coder 30B A3B Instruct",
2197      "cost_per_1m_in": 0.15,
2198      "cost_per_1m_out": 0.6,
2199      "cost_per_1m_in_cached": 0,
2200      "cost_per_1m_out_cached": 0,
2201      "context_window": 262144,
2202      "default_max_tokens": 8000,
2203      "can_reason": true,
2204      "reasoning_levels": [
2205        "low",
2206        "medium",
2207        "high"
2208      ],
2209      "default_reasoning_effort": "medium",
2210      "supports_attachments": false
2211    },
2212    {
2213      "id": "alibaba/qwen3-max-thinking",
2214      "name": "Qwen 3 Max Thinking",
2215      "cost_per_1m_in": 1.2,
2216      "cost_per_1m_out": 6,
2217      "cost_per_1m_in_cached": 0.24,
2218      "cost_per_1m_out_cached": 0,
2219      "context_window": 256000,
2220      "default_max_tokens": 8000,
2221      "can_reason": true,
2222      "reasoning_levels": [
2223        "low",
2224        "medium",
2225        "high"
2226      ],
2227      "default_reasoning_effort": "medium",
2228      "supports_attachments": false
2229    },
2230    {
2231      "id": "alibaba/qwen3.5-flash",
2232      "name": "Qwen 3.5 Flash",
2233      "cost_per_1m_in": 0.1,
2234      "cost_per_1m_out": 0.4,
2235      "cost_per_1m_in_cached": 0.001,
2236      "cost_per_1m_out_cached": 0.125,
2237      "context_window": 1000000,
2238      "default_max_tokens": 8000,
2239      "can_reason": true,
2240      "reasoning_levels": [
2241        "low",
2242        "medium",
2243        "high"
2244      ],
2245      "default_reasoning_effort": "medium",
2246      "supports_attachments": true
2247    },
2248    {
2249      "id": "alibaba/qwen3.5-plus",
2250      "name": "Qwen 3.5 Plus",
2251      "cost_per_1m_in": 0.4,
2252      "cost_per_1m_out": 2.4,
2253      "cost_per_1m_in_cached": 0.04,
2254      "cost_per_1m_out_cached": 0.5,
2255      "context_window": 1000000,
2256      "default_max_tokens": 8000,
2257      "can_reason": true,
2258      "reasoning_levels": [
2259        "low",
2260        "medium",
2261        "high"
2262      ],
2263      "default_reasoning_effort": "medium",
2264      "supports_attachments": true
2265    },
2266    {
2267      "id": "alibaba/qwen3.6-27b",
2268      "name": "Qwen 3.6 27B",
2269      "cost_per_1m_in": 0.6,
2270      "cost_per_1m_out": 3.6,
2271      "cost_per_1m_in_cached": 0,
2272      "cost_per_1m_out_cached": 0,
2273      "context_window": 256000,
2274      "default_max_tokens": 8000,
2275      "can_reason": true,
2276      "reasoning_levels": [
2277        "low",
2278        "medium",
2279        "high"
2280      ],
2281      "default_reasoning_effort": "medium",
2282      "supports_attachments": true
2283    },
2284    {
2285      "id": "alibaba/qwen-3.6-max-preview",
2286      "name": "Qwen 3.6 Max Preview",
2287      "cost_per_1m_in": 1.3,
2288      "cost_per_1m_out": 7.8,
2289      "cost_per_1m_in_cached": 0.26,
2290      "cost_per_1m_out_cached": 1.625,
2291      "context_window": 240000,
2292      "default_max_tokens": 8000,
2293      "can_reason": true,
2294      "reasoning_levels": [
2295        "low",
2296        "medium",
2297        "high"
2298      ],
2299      "default_reasoning_effort": "medium",
2300      "supports_attachments": true
2301    },
2302    {
2303      "id": "alibaba/qwen3.6-plus",
2304      "name": "Qwen 3.6 Plus",
2305      "cost_per_1m_in": 0.5,
2306      "cost_per_1m_out": 3,
2307      "cost_per_1m_in_cached": 0.1,
2308      "cost_per_1m_out_cached": 0.625,
2309      "context_window": 1000000,
2310      "default_max_tokens": 8000,
2311      "can_reason": true,
2312      "reasoning_levels": [
2313        "low",
2314        "medium",
2315        "high"
2316      ],
2317      "default_reasoning_effort": "medium",
2318      "supports_attachments": true
2319    },
2320    {
2321      "id": "alibaba/qwen-3-235b",
2322      "name": "Qwen3 235B A22b Instruct 2507",
2323      "cost_per_1m_in": 0.6,
2324      "cost_per_1m_out": 1.2,
2325      "cost_per_1m_in_cached": 0.6,
2326      "cost_per_1m_out_cached": 0,
2327      "context_window": 131000,
2328      "default_max_tokens": 8000,
2329      "can_reason": false,
2330      "supports_attachments": false
2331    },
2332    {
2333      "id": "alibaba/qwen3-coder",
2334      "name": "Qwen3 Coder 480B A35B Instruct",
2335      "cost_per_1m_in": 1.5,
2336      "cost_per_1m_out": 7.5,
2337      "cost_per_1m_in_cached": 0.3,
2338      "cost_per_1m_out_cached": 0,
2339      "context_window": 262144,
2340      "default_max_tokens": 8000,
2341      "can_reason": false,
2342      "supports_attachments": false
2343    },
2344    {
2345      "id": "alibaba/qwen3-coder-next",
2346      "name": "Qwen3 Coder Next",
2347      "cost_per_1m_in": 0.5,
2348      "cost_per_1m_out": 1.2,
2349      "cost_per_1m_in_cached": 0,
2350      "cost_per_1m_out_cached": 0,
2351      "context_window": 256000,
2352      "default_max_tokens": 8000,
2353      "can_reason": false,
2354      "supports_attachments": false
2355    },
2356    {
2357      "id": "alibaba/qwen3-coder-plus",
2358      "name": "Qwen3 Coder Plus",
2359      "cost_per_1m_in": 1,
2360      "cost_per_1m_out": 5,
2361      "cost_per_1m_in_cached": 0.2,
2362      "cost_per_1m_out_cached": 0,
2363      "context_window": 1000000,
2364      "default_max_tokens": 8000,
2365      "can_reason": false,
2366      "supports_attachments": false
2367    },
2368    {
2369      "id": "alibaba/qwen3-max",
2370      "name": "Qwen3 Max",
2371      "cost_per_1m_in": 1.2,
2372      "cost_per_1m_out": 6,
2373      "cost_per_1m_in_cached": 0.24,
2374      "cost_per_1m_out_cached": 0,
2375      "context_window": 262144,
2376      "default_max_tokens": 8000,
2377      "can_reason": false,
2378      "supports_attachments": false
2379    },
2380    {
2381      "id": "alibaba/qwen3-max-preview",
2382      "name": "Qwen3 Max Preview",
2383      "cost_per_1m_in": 1.2,
2384      "cost_per_1m_out": 6,
2385      "cost_per_1m_in_cached": 0.24,
2386      "cost_per_1m_out_cached": 0,
2387      "context_window": 262144,
2388      "default_max_tokens": 8000,
2389      "can_reason": false,
2390      "supports_attachments": false
2391    },
2392    {
2393      "id": "alibaba/qwen3-vl-thinking",
2394      "name": "Qwen3 VL 235B A22B Thinking",
2395      "cost_per_1m_in": 0.4,
2396      "cost_per_1m_out": 4,
2397      "cost_per_1m_in_cached": 0,
2398      "cost_per_1m_out_cached": 0,
2399      "context_window": 131072,
2400      "default_max_tokens": 8000,
2401      "can_reason": true,
2402      "reasoning_levels": [
2403        "low",
2404        "medium",
2405        "high"
2406      ],
2407      "default_reasoning_effort": "medium",
2408      "supports_attachments": true
2409    },
2410    {
2411      "id": "alibaba/qwen3-235b-a22b-thinking",
2412      "name": "Qwen3 VL 235B A22B Thinking",
2413      "cost_per_1m_in": 0.4,
2414      "cost_per_1m_out": 4,
2415      "cost_per_1m_in_cached": 0,
2416      "cost_per_1m_out_cached": 0,
2417      "context_window": 131072,
2418      "default_max_tokens": 8000,
2419      "can_reason": true,
2420      "reasoning_levels": [
2421        "low",
2422        "medium",
2423        "high"
2424      ],
2425      "default_reasoning_effort": "medium",
2426      "supports_attachments": true
2427    },
2428    {
2429      "id": "alibaba/qwen-3-14b",
2430      "name": "Qwen3-14B",
2431      "cost_per_1m_in": 0.12,
2432      "cost_per_1m_out": 0.24,
2433      "cost_per_1m_in_cached": 0,
2434      "cost_per_1m_out_cached": 0,
2435      "context_window": 40960,
2436      "default_max_tokens": 8000,
2437      "can_reason": true,
2438      "reasoning_levels": [
2439        "low",
2440        "medium",
2441        "high"
2442      ],
2443      "default_reasoning_effort": "medium",
2444      "supports_attachments": false
2445    },
2446    {
2447      "id": "alibaba/qwen-3-30b",
2448      "name": "Qwen3-30B-A3B",
2449      "cost_per_1m_in": 0.08,
2450      "cost_per_1m_out": 0.29,
2451      "cost_per_1m_in_cached": 0,
2452      "cost_per_1m_out_cached": 0,
2453      "context_window": 40960,
2454      "default_max_tokens": 8000,
2455      "can_reason": true,
2456      "reasoning_levels": [
2457        "low",
2458        "medium",
2459        "high"
2460      ],
2461      "default_reasoning_effort": "medium",
2462      "supports_attachments": false
2463    },
2464    {
2465      "id": "bytedance/seed-1.6",
2466      "name": "Seed 1.6",
2467      "cost_per_1m_in": 0.25,
2468      "cost_per_1m_out": 2,
2469      "cost_per_1m_in_cached": 0.05,
2470      "cost_per_1m_out_cached": 0,
2471      "context_window": 256000,
2472      "default_max_tokens": 8000,
2473      "can_reason": true,
2474      "reasoning_levels": [
2475        "low",
2476        "medium",
2477        "high"
2478      ],
2479      "default_reasoning_effort": "medium",
2480      "supports_attachments": false
2481    },
2482    {
2483      "id": "perplexity/sonar",
2484      "name": "Sonar",
2485      "cost_per_1m_in": 0,
2486      "cost_per_1m_out": 0,
2487      "cost_per_1m_in_cached": 0,
2488      "cost_per_1m_out_cached": 0,
2489      "context_window": 127000,
2490      "default_max_tokens": 8000,
2491      "can_reason": false,
2492      "supports_attachments": true
2493    },
2494    {
2495      "id": "perplexity/sonar-pro",
2496      "name": "Sonar Pro",
2497      "cost_per_1m_in": 0,
2498      "cost_per_1m_out": 0,
2499      "cost_per_1m_in_cached": 0,
2500      "cost_per_1m_out_cached": 0,
2501      "context_window": 200000,
2502      "default_max_tokens": 8000,
2503      "can_reason": false,
2504      "supports_attachments": true
2505    },
2506    {
2507      "id": "arcee-ai/trinity-large-preview",
2508      "name": "Trinity Large Preview",
2509      "cost_per_1m_in": 0.25,
2510      "cost_per_1m_out": 1,
2511      "cost_per_1m_in_cached": 0,
2512      "cost_per_1m_out_cached": 0,
2513      "context_window": 131000,
2514      "default_max_tokens": 8000,
2515      "can_reason": false,
2516      "supports_attachments": false
2517    },
2518    {
2519      "id": "arcee-ai/trinity-large-thinking",
2520      "name": "Trinity Large Thinking",
2521      "cost_per_1m_in": 0.25,
2522      "cost_per_1m_out": 0.9,
2523      "cost_per_1m_in_cached": 0,
2524      "cost_per_1m_out_cached": 0,
2525      "context_window": 262100,
2526      "default_max_tokens": 8000,
2527      "can_reason": true,
2528      "reasoning_levels": [
2529        "low",
2530        "medium",
2531        "high"
2532      ],
2533      "default_reasoning_effort": "medium",
2534      "supports_attachments": false
2535    },
2536    {
2537      "id": "openai/o1",
2538      "name": "o1",
2539      "cost_per_1m_in": 15,
2540      "cost_per_1m_out": 60,
2541      "cost_per_1m_in_cached": 7.5,
2542      "cost_per_1m_out_cached": 0,
2543      "context_window": 200000,
2544      "default_max_tokens": 8000,
2545      "can_reason": true,
2546      "reasoning_levels": [
2547        "low",
2548        "medium",
2549        "high"
2550      ],
2551      "default_reasoning_effort": "medium",
2552      "supports_attachments": true
2553    },
2554    {
2555      "id": "openai/o3",
2556      "name": "o3",
2557      "cost_per_1m_in": 2,
2558      "cost_per_1m_out": 8,
2559      "cost_per_1m_in_cached": 0.5,
2560      "cost_per_1m_out_cached": 0,
2561      "context_window": 200000,
2562      "default_max_tokens": 8000,
2563      "can_reason": true,
2564      "reasoning_levels": [
2565        "low",
2566        "medium",
2567        "high"
2568      ],
2569      "default_reasoning_effort": "medium",
2570      "supports_attachments": true
2571    },
2572    {
2573      "id": "openai/o3-pro",
2574      "name": "o3 Pro",
2575      "cost_per_1m_in": 20,
2576      "cost_per_1m_out": 80,
2577      "cost_per_1m_in_cached": 0,
2578      "cost_per_1m_out_cached": 0,
2579      "context_window": 200000,
2580      "default_max_tokens": 8000,
2581      "can_reason": true,
2582      "reasoning_levels": [
2583        "low",
2584        "medium",
2585        "high"
2586      ],
2587      "default_reasoning_effort": "medium",
2588      "supports_attachments": true
2589    },
2590    {
2591      "id": "openai/o3-deep-research",
2592      "name": "o3-deep-research",
2593      "cost_per_1m_in": 10,
2594      "cost_per_1m_out": 40,
2595      "cost_per_1m_in_cached": 2.5,
2596      "cost_per_1m_out_cached": 0,
2597      "context_window": 200000,
2598      "default_max_tokens": 8000,
2599      "can_reason": true,
2600      "reasoning_levels": [
2601        "low",
2602        "medium",
2603        "high"
2604      ],
2605      "default_reasoning_effort": "medium",
2606      "supports_attachments": true
2607    },
2608    {
2609      "id": "openai/o3-mini",
2610      "name": "o3-mini",
2611      "cost_per_1m_in": 1.1,
2612      "cost_per_1m_out": 4.4,
2613      "cost_per_1m_in_cached": 0.55,
2614      "cost_per_1m_out_cached": 0,
2615      "context_window": 200000,
2616      "default_max_tokens": 8000,
2617      "can_reason": true,
2618      "reasoning_levels": [
2619        "low",
2620        "medium",
2621        "high"
2622      ],
2623      "default_reasoning_effort": "medium",
2624      "supports_attachments": false
2625    },
2626    {
2627      "id": "openai/o4-mini",
2628      "name": "o4-mini",
2629      "cost_per_1m_in": 1.1,
2630      "cost_per_1m_out": 4.4,
2631      "cost_per_1m_in_cached": 0.275,
2632      "cost_per_1m_out_cached": 0,
2633      "context_window": 200000,
2634      "default_max_tokens": 8000,
2635      "can_reason": true,
2636      "reasoning_levels": [
2637        "low",
2638        "medium",
2639        "high"
2640      ],
2641      "default_reasoning_effort": "medium",
2642      "supports_attachments": true
2643    }
2644  ],
2645  "default_headers": {
2646    "HTTP-Referer": "https://charm.land",
2647    "X-Title": "Crush"
2648  }
2649}