vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "vercel",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true,
  21      "options": {}
  22    },
  23    {
  24      "id": "anthropic/claude-3.5-haiku",
  25      "name": "Claude 3.5 Haiku",
   26      "cost_per_1m_in": 0.8,
  27      "cost_per_1m_out": 4,
  28      "cost_per_1m_in_cached": 0.08,
  29      "cost_per_1m_out_cached": 1,
  30      "context_window": 200000,
  31      "default_max_tokens": 8000,
  32      "can_reason": false,
  33      "supports_attachments": true,
  34      "options": {}
  35    },
  36    {
  37      "id": "anthropic/claude-3.5-sonnet",
  38      "name": "Claude 3.5 Sonnet",
  39      "cost_per_1m_in": 3,
  40      "cost_per_1m_out": 15,
  41      "cost_per_1m_in_cached": 0.3,
  42      "cost_per_1m_out_cached": 3.75,
  43      "context_window": 200000,
  44      "default_max_tokens": 8000,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "anthropic/claude-3.5-sonnet-20240620",
  51      "name": "Claude 3.5 Sonnet (2024-06-20)",
  52      "cost_per_1m_in": 3,
  53      "cost_per_1m_out": 15,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 200000,
  57      "default_max_tokens": 8000,
  58      "can_reason": false,
  59      "supports_attachments": true,
  60      "options": {}
  61    },
  62    {
  63      "id": "anthropic/claude-3.7-sonnet",
  64      "name": "Claude 3.7 Sonnet",
  65      "cost_per_1m_in": 3,
  66      "cost_per_1m_out": 15,
  67      "cost_per_1m_in_cached": 0.3,
  68      "cost_per_1m_out_cached": 3.75,
  69      "context_window": 200000,
  70      "default_max_tokens": 8000,
  71      "can_reason": true,
  72      "reasoning_levels": [
  73        "none",
  74        "minimal",
  75        "low",
  76        "medium",
  77        "high",
  78        "xhigh"
  79      ],
  80      "default_reasoning_effort": "medium",
  81      "supports_attachments": true,
  82      "options": {}
  83    },
  84    {
  85      "id": "anthropic/claude-haiku-4.5",
  86      "name": "Claude Haiku 4.5",
  87      "cost_per_1m_in": 1,
  88      "cost_per_1m_out": 5,
   89      "cost_per_1m_in_cached": 0.1,
  90      "cost_per_1m_out_cached": 1.25,
  91      "context_window": 200000,
  92      "default_max_tokens": 8000,
  93      "can_reason": true,
  94      "reasoning_levels": [
  95        "none",
  96        "minimal",
  97        "low",
  98        "medium",
  99        "high",
 100        "xhigh"
 101      ],
 102      "default_reasoning_effort": "medium",
 103      "supports_attachments": true,
 104      "options": {}
 105    },
 106    {
 107      "id": "anthropic/claude-opus-4",
 108      "name": "Claude Opus 4",
 109      "cost_per_1m_in": 15,
 110      "cost_per_1m_out": 75,
 111      "cost_per_1m_in_cached": 1.5,
 112      "cost_per_1m_out_cached": 18.75,
 113      "context_window": 200000,
 114      "default_max_tokens": 8000,
 115      "can_reason": true,
 116      "reasoning_levels": [
 117        "none",
 118        "minimal",
 119        "low",
 120        "medium",
 121        "high",
 122        "xhigh"
 123      ],
 124      "default_reasoning_effort": "medium",
 125      "supports_attachments": true,
 126      "options": {}
 127    },
 128    {
 129      "id": "anthropic/claude-opus-4.1",
 130      "name": "Claude Opus 4.1",
 131      "cost_per_1m_in": 15,
 132      "cost_per_1m_out": 75,
 133      "cost_per_1m_in_cached": 1.5,
 134      "cost_per_1m_out_cached": 18.75,
 135      "context_window": 200000,
 136      "default_max_tokens": 8000,
 137      "can_reason": true,
 138      "reasoning_levels": [
 139        "none",
 140        "minimal",
 141        "low",
 142        "medium",
 143        "high",
 144        "xhigh"
 145      ],
 146      "default_reasoning_effort": "medium",
 147      "supports_attachments": true,
 148      "options": {}
 149    },
 150    {
 151      "id": "anthropic/claude-opus-4.5",
 152      "name": "Claude Opus 4.5",
 153      "cost_per_1m_in": 5,
 154      "cost_per_1m_out": 25,
 155      "cost_per_1m_in_cached": 0.5,
 156      "cost_per_1m_out_cached": 6.25,
 157      "context_window": 200000,
 158      "default_max_tokens": 8000,
 159      "can_reason": true,
 160      "reasoning_levels": [
 161        "none",
 162        "minimal",
 163        "low",
 164        "medium",
 165        "high",
 166        "xhigh"
 167      ],
 168      "default_reasoning_effort": "medium",
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-opus-4.6",
 174      "name": "Claude Opus 4.6",
 175      "cost_per_1m_in": 5,
 176      "cost_per_1m_out": 25,
 177      "cost_per_1m_in_cached": 0.5,
 178      "cost_per_1m_out_cached": 6.25,
 179      "context_window": 1000000,
 180      "default_max_tokens": 8000,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "none",
 184        "minimal",
 185        "low",
 186        "medium",
 187        "high",
 188        "xhigh"
 189      ],
 190      "default_reasoning_effort": "medium",
 191      "supports_attachments": true,
 192      "options": {}
 193    },
 194    {
 195      "id": "anthropic/claude-sonnet-4",
 196      "name": "Claude Sonnet 4",
 197      "cost_per_1m_in": 3,
 198      "cost_per_1m_out": 15,
 199      "cost_per_1m_in_cached": 0.3,
 200      "cost_per_1m_out_cached": 3.75,
 201      "context_window": 1000000,
 202      "default_max_tokens": 8000,
 203      "can_reason": true,
 204      "reasoning_levels": [
 205        "none",
 206        "minimal",
 207        "low",
 208        "medium",
 209        "high",
 210        "xhigh"
 211      ],
 212      "default_reasoning_effort": "medium",
 213      "supports_attachments": true,
 214      "options": {}
 215    },
 216    {
 217      "id": "anthropic/claude-sonnet-4.5",
 218      "name": "Claude Sonnet 4.5",
 219      "cost_per_1m_in": 3,
 220      "cost_per_1m_out": 15,
 221      "cost_per_1m_in_cached": 0.3,
 222      "cost_per_1m_out_cached": 3.75,
 223      "context_window": 1000000,
 224      "default_max_tokens": 8000,
 225      "can_reason": true,
 226      "reasoning_levels": [
 227        "none",
 228        "minimal",
 229        "low",
 230        "medium",
 231        "high",
 232        "xhigh"
 233      ],
 234      "default_reasoning_effort": "medium",
 235      "supports_attachments": true,
 236      "options": {}
 237    },
 238    {
 239      "id": "anthropic/claude-sonnet-4.6",
 240      "name": "Claude Sonnet 4.6",
 241      "cost_per_1m_in": 3,
 242      "cost_per_1m_out": 15,
 243      "cost_per_1m_in_cached": 0.3,
 244      "cost_per_1m_out_cached": 3.75,
 245      "context_window": 1000000,
 246      "default_max_tokens": 8000,
 247      "can_reason": true,
 248      "reasoning_levels": [
 249        "none",
 250        "minimal",
 251        "low",
 252        "medium",
 253        "high",
 254        "xhigh"
 255      ],
 256      "default_reasoning_effort": "medium",
 257      "supports_attachments": true,
 258      "options": {}
 259    },
 260    {
 261      "id": "openai/codex-mini",
 262      "name": "Codex Mini",
 263      "cost_per_1m_in": 1.5,
 264      "cost_per_1m_out": 6,
 265      "cost_per_1m_in_cached": 0.375,
 266      "cost_per_1m_out_cached": 0,
 267      "context_window": 200000,
 268      "default_max_tokens": 8000,
 269      "can_reason": true,
 270      "reasoning_levels": [
 271        "low",
 272        "medium",
 273        "high"
 274      ],
 275      "default_reasoning_effort": "medium",
 276      "supports_attachments": true,
 277      "options": {}
 278    },
 279    {
 280      "id": "cohere/command-a",
 281      "name": "Command A",
 282      "cost_per_1m_in": 2.5,
 283      "cost_per_1m_out": 10,
 284      "cost_per_1m_in_cached": 0,
 285      "cost_per_1m_out_cached": 0,
 286      "context_window": 256000,
 287      "default_max_tokens": 8000,
 288      "can_reason": false,
 289      "supports_attachments": false,
 290      "options": {}
 291    },
 292    {
 293      "id": "deepseek/deepseek-v3",
 294      "name": "DeepSeek V3 0324",
 295      "cost_per_1m_in": 0.77,
 296      "cost_per_1m_out": 0.77,
 297      "cost_per_1m_in_cached": 0,
 298      "cost_per_1m_out_cached": 0,
 299      "context_window": 163840,
 300      "default_max_tokens": 8000,
 301      "can_reason": false,
 302      "supports_attachments": false,
 303      "options": {}
 304    },
 305    {
 306      "id": "deepseek/deepseek-v3.1-terminus",
 307      "name": "DeepSeek V3.1 Terminus",
 308      "cost_per_1m_in": 0.27,
 309      "cost_per_1m_out": 1,
 310      "cost_per_1m_in_cached": 0,
 311      "cost_per_1m_out_cached": 0,
 312      "context_window": 131072,
 313      "default_max_tokens": 8000,
 314      "can_reason": true,
 315      "reasoning_levels": [
 316        "low",
 317        "medium",
 318        "high"
 319      ],
 320      "default_reasoning_effort": "medium",
 321      "supports_attachments": false,
 322      "options": {}
 323    },
 324    {
 325      "id": "deepseek/deepseek-v3.2",
 326      "name": "DeepSeek V3.2",
 327      "cost_per_1m_in": 0.26,
 328      "cost_per_1m_out": 0.38,
 329      "cost_per_1m_in_cached": 0.13,
 330      "cost_per_1m_out_cached": 0,
 331      "context_window": 128000,
 332      "default_max_tokens": 8000,
 333      "can_reason": false,
 334      "supports_attachments": false,
 335      "options": {}
 336    },
 337    {
 338      "id": "deepseek/deepseek-v3.2-thinking",
 339      "name": "DeepSeek V3.2 Thinking",
 340      "cost_per_1m_in": 0.28,
 341      "cost_per_1m_out": 0.42,
 342      "cost_per_1m_in_cached": 0.028,
 343      "cost_per_1m_out_cached": 0,
 344      "context_window": 128000,
 345      "default_max_tokens": 8000,
 346      "can_reason": true,
 347      "reasoning_levels": [
 348        "low",
 349        "medium",
 350        "high"
 351      ],
 352      "default_reasoning_effort": "medium",
 353      "supports_attachments": false,
 354      "options": {}
 355    },
 356    {
 357      "id": "deepseek/deepseek-v3.1",
 358      "name": "DeepSeek-V3.1",
 359      "cost_per_1m_in": 0.21,
  360      "cost_per_1m_out": 0.79,
 361      "cost_per_1m_in_cached": 0,
 362      "cost_per_1m_out_cached": 0,
 363      "context_window": 163840,
 364      "default_max_tokens": 8000,
 365      "can_reason": true,
 366      "reasoning_levels": [
 367        "low",
 368        "medium",
 369        "high"
 370      ],
 371      "default_reasoning_effort": "medium",
 372      "supports_attachments": false,
 373      "options": {}
 374    },
 375    {
 376      "id": "mistral/devstral-2",
 377      "name": "Devstral 2",
 378      "cost_per_1m_in": 0,
 379      "cost_per_1m_out": 0,
 380      "cost_per_1m_in_cached": 0,
 381      "cost_per_1m_out_cached": 0,
 382      "context_window": 256000,
 383      "default_max_tokens": 8000,
 384      "can_reason": false,
 385      "supports_attachments": false,
 386      "options": {}
 387    },
 388    {
 389      "id": "mistral/devstral-small",
 390      "name": "Devstral Small 1.1",
  391      "cost_per_1m_in": 0.1,
 392      "cost_per_1m_out": 0.3,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 128000,
 396      "default_max_tokens": 8000,
 397      "can_reason": false,
 398      "supports_attachments": false,
 399      "options": {}
 400    },
 401    {
 402      "id": "mistral/devstral-small-2",
 403      "name": "Devstral Small 2",
 404      "cost_per_1m_in": 0,
 405      "cost_per_1m_out": 0,
 406      "cost_per_1m_in_cached": 0,
 407      "cost_per_1m_out_cached": 0,
 408      "context_window": 256000,
 409      "default_max_tokens": 8000,
 410      "can_reason": false,
 411      "supports_attachments": false,
 412      "options": {}
 413    },
 414    {
 415      "id": "zai/glm-4.5-air",
 416      "name": "GLM 4.5 Air",
  417      "cost_per_1m_in": 0.2,
 418      "cost_per_1m_out": 1.1,
 419      "cost_per_1m_in_cached": 0.03,
 420      "cost_per_1m_out_cached": 0,
 421      "context_window": 128000,
 422      "default_max_tokens": 8000,
 423      "can_reason": true,
 424      "reasoning_levels": [
 425        "low",
 426        "medium",
 427        "high"
 428      ],
 429      "default_reasoning_effort": "medium",
 430      "supports_attachments": false,
 431      "options": {}
 432    },
 433    {
 434      "id": "zai/glm-4.5v",
 435      "name": "GLM 4.5V",
 436      "cost_per_1m_in": 0.6,
  437      "cost_per_1m_out": 1.8,
 438      "cost_per_1m_in_cached": 0,
 439      "cost_per_1m_out_cached": 0,
 440      "context_window": 65536,
 441      "default_max_tokens": 8000,
 442      "can_reason": true,
 443      "reasoning_levels": [
 444        "low",
 445        "medium",
 446        "high"
 447      ],
 448      "default_reasoning_effort": "medium",
 449      "supports_attachments": true,
 450      "options": {}
 451    },
 452    {
 453      "id": "zai/glm-4.6",
 454      "name": "GLM 4.6",
  455      "cost_per_1m_in": 0.45,
  456      "cost_per_1m_out": 1.8,
 457      "cost_per_1m_in_cached": 0.11,
 458      "cost_per_1m_out_cached": 0,
 459      "context_window": 200000,
 460      "default_max_tokens": 8000,
 461      "can_reason": true,
 462      "reasoning_levels": [
 463        "low",
 464        "medium",
 465        "high"
 466      ],
 467      "default_reasoning_effort": "medium",
 468      "supports_attachments": false,
 469      "options": {}
 470    },
 471    {
 472      "id": "zai/glm-4.7",
 473      "name": "GLM 4.7",
 474      "cost_per_1m_in": 0.43,
 475      "cost_per_1m_out": 1.75,
 476      "cost_per_1m_in_cached": 0.08,
 477      "cost_per_1m_out_cached": 0,
 478      "context_window": 202752,
 479      "default_max_tokens": 8000,
 480      "can_reason": true,
 481      "reasoning_levels": [
 482        "low",
 483        "medium",
 484        "high"
 485      ],
 486      "default_reasoning_effort": "medium",
 487      "supports_attachments": false,
 488      "options": {}
 489    },
 490    {
 491      "id": "zai/glm-4.7-flashx",
 492      "name": "GLM 4.7 FlashX",
 493      "cost_per_1m_in": 0.06,
  494      "cost_per_1m_out": 0.4,
 495      "cost_per_1m_in_cached": 0.01,
 496      "cost_per_1m_out_cached": 0,
 497      "context_window": 200000,
 498      "default_max_tokens": 8000,
 499      "can_reason": true,
 500      "reasoning_levels": [
 501        "low",
 502        "medium",
 503        "high"
 504      ],
 505      "default_reasoning_effort": "medium",
 506      "supports_attachments": false,
 507      "options": {}
 508    },
 509    {
 510      "id": "zai/glm-4.5",
 511      "name": "GLM-4.5",
 512      "cost_per_1m_in": 0.6,
 513      "cost_per_1m_out": 2.2,
 514      "cost_per_1m_in_cached": 0,
 515      "cost_per_1m_out_cached": 0,
 516      "context_window": 131072,
 517      "default_max_tokens": 8000,
 518      "can_reason": true,
 519      "reasoning_levels": [
 520        "low",
 521        "medium",
 522        "high"
 523      ],
 524      "default_reasoning_effort": "medium",
 525      "supports_attachments": false,
 526      "options": {}
 527    },
 528    {
 529      "id": "zai/glm-4.6v",
 530      "name": "GLM-4.6V",
 531      "cost_per_1m_in": 0.3,
  532      "cost_per_1m_out": 0.9,
  533      "cost_per_1m_in_cached": 0.05,
 534      "cost_per_1m_out_cached": 0,
 535      "context_window": 128000,
 536      "default_max_tokens": 8000,
 537      "can_reason": true,
 538      "reasoning_levels": [
 539        "low",
 540        "medium",
 541        "high"
 542      ],
 543      "default_reasoning_effort": "medium",
 544      "supports_attachments": true,
 545      "options": {}
 546    },
 547    {
 548      "id": "zai/glm-4.6v-flash",
 549      "name": "GLM-4.6V-Flash",
 550      "cost_per_1m_in": 0,
 551      "cost_per_1m_out": 0,
 552      "cost_per_1m_in_cached": 0,
 553      "cost_per_1m_out_cached": 0,
 554      "context_window": 128000,
 555      "default_max_tokens": 8000,
 556      "can_reason": true,
 557      "reasoning_levels": [
 558        "low",
 559        "medium",
 560        "high"
 561      ],
 562      "default_reasoning_effort": "medium",
 563      "supports_attachments": true,
 564      "options": {}
 565    },
 566    {
 567      "id": "zai/glm-5",
 568      "name": "GLM-5",
 569      "cost_per_1m_in": 1,
  570      "cost_per_1m_out": 3.2,
  571      "cost_per_1m_in_cached": 0.2,
 572      "cost_per_1m_out_cached": 0,
 573      "context_window": 202800,
 574      "default_max_tokens": 8000,
 575      "can_reason": true,
 576      "reasoning_levels": [
 577        "low",
 578        "medium",
 579        "high"
 580      ],
 581      "default_reasoning_effort": "medium",
 582      "supports_attachments": false,
 583      "options": {}
 584    },
 585    {
 586      "id": "openai/gpt-5.1-codex-max",
 587      "name": "GPT 5.1 Codex Max",
 588      "cost_per_1m_in": 1.25,
 589      "cost_per_1m_out": 10,
 590      "cost_per_1m_in_cached": 0.125,
 591      "cost_per_1m_out_cached": 0,
 592      "context_window": 400000,
 593      "default_max_tokens": 8000,
 594      "can_reason": true,
 595      "reasoning_levels": [
 596        "low",
 597        "medium",
 598        "high"
 599      ],
 600      "default_reasoning_effort": "medium",
 601      "supports_attachments": true,
 602      "options": {}
 603    },
 604    {
 605      "id": "openai/gpt-5.1-thinking",
 606      "name": "GPT 5.1 Thinking",
 607      "cost_per_1m_in": 1.25,
 608      "cost_per_1m_out": 10,
 609      "cost_per_1m_in_cached": 0.13,
 610      "cost_per_1m_out_cached": 0,
 611      "context_window": 400000,
 612      "default_max_tokens": 8000,
 613      "can_reason": true,
 614      "reasoning_levels": [
 615        "low",
 616        "medium",
 617        "high"
 618      ],
 619      "default_reasoning_effort": "medium",
 620      "supports_attachments": true,
 621      "options": {}
 622    },
 623    {
 624      "id": "openai/gpt-5.2",
 625      "name": "GPT 5.2",
 626      "cost_per_1m_in": 1.75,
 627      "cost_per_1m_out": 14,
 628      "cost_per_1m_in_cached": 0.18,
 629      "cost_per_1m_out_cached": 0,
 630      "context_window": 400000,
 631      "default_max_tokens": 8000,
 632      "can_reason": true,
 633      "reasoning_levels": [
 634        "low",
 635        "medium",
 636        "high"
 637      ],
 638      "default_reasoning_effort": "medium",
 639      "supports_attachments": true,
 640      "options": {}
 641    },
 642    {
 643      "id": "openai/gpt-5.2-pro",
  644      "name": "GPT 5.2 Pro",
 645      "cost_per_1m_in": 21,
 646      "cost_per_1m_out": 168,
 647      "cost_per_1m_in_cached": 0,
 648      "cost_per_1m_out_cached": 0,
 649      "context_window": 400000,
 650      "default_max_tokens": 8000,
 651      "can_reason": true,
 652      "reasoning_levels": [
 653        "low",
 654        "medium",
 655        "high"
 656      ],
 657      "default_reasoning_effort": "medium",
 658      "supports_attachments": true,
 659      "options": {}
 660    },
 661    {
 662      "id": "openai/gpt-4-turbo",
 663      "name": "GPT-4 Turbo",
 664      "cost_per_1m_in": 10,
 665      "cost_per_1m_out": 30,
 666      "cost_per_1m_in_cached": 0,
 667      "cost_per_1m_out_cached": 0,
 668      "context_window": 128000,
 669      "default_max_tokens": 4096,
 670      "can_reason": false,
 671      "supports_attachments": true,
 672      "options": {}
 673    },
 674    {
 675      "id": "openai/gpt-4.1",
 676      "name": "GPT-4.1",
 677      "cost_per_1m_in": 2,
 678      "cost_per_1m_out": 8,
 679      "cost_per_1m_in_cached": 0.5,
 680      "cost_per_1m_out_cached": 0,
 681      "context_window": 1047576,
 682      "default_max_tokens": 8000,
 683      "can_reason": false,
 684      "supports_attachments": true,
 685      "options": {}
 686    },
 687    {
 688      "id": "openai/gpt-4.1-mini",
 689      "name": "GPT-4.1 mini",
  690      "cost_per_1m_in": 0.4,
  691      "cost_per_1m_out": 1.6,
  692      "cost_per_1m_in_cached": 0.1,
 693      "cost_per_1m_out_cached": 0,
 694      "context_window": 1047576,
 695      "default_max_tokens": 8000,
 696      "can_reason": false,
 697      "supports_attachments": true,
 698      "options": {}
 699    },
 700    {
 701      "id": "openai/gpt-4.1-nano",
 702      "name": "GPT-4.1 nano",
  703      "cost_per_1m_in": 0.1,
  704      "cost_per_1m_out": 0.4,
 705      "cost_per_1m_in_cached": 0.03,
 706      "cost_per_1m_out_cached": 0,
 707      "context_window": 1047576,
 708      "default_max_tokens": 8000,
 709      "can_reason": false,
 710      "supports_attachments": true,
 711      "options": {}
 712    },
 713    {
 714      "id": "openai/gpt-4o",
 715      "name": "GPT-4o",
 716      "cost_per_1m_in": 2.5,
 717      "cost_per_1m_out": 10,
 718      "cost_per_1m_in_cached": 1.25,
 719      "cost_per_1m_out_cached": 0,
 720      "context_window": 128000,
 721      "default_max_tokens": 8000,
 722      "can_reason": false,
 723      "supports_attachments": true,
 724      "options": {}
 725    },
 726    {
 727      "id": "openai/gpt-4o-mini",
 728      "name": "GPT-4o mini",
 729      "cost_per_1m_in": 0.15,
 730      "cost_per_1m_out": 0.6,
 731      "cost_per_1m_in_cached": 0.075,
 732      "cost_per_1m_out_cached": 0,
 733      "context_window": 128000,
 734      "default_max_tokens": 8000,
 735      "can_reason": false,
 736      "supports_attachments": true,
 737      "options": {}
 738    },
 739    {
 740      "id": "openai/gpt-5",
 741      "name": "GPT-5",
 742      "cost_per_1m_in": 1.25,
 743      "cost_per_1m_out": 10,
 744      "cost_per_1m_in_cached": 0.13,
 745      "cost_per_1m_out_cached": 0,
 746      "context_window": 400000,
 747      "default_max_tokens": 8000,
 748      "can_reason": true,
 749      "reasoning_levels": [
 750        "low",
 751        "medium",
 752        "high"
 753      ],
 754      "default_reasoning_effort": "medium",
 755      "supports_attachments": true,
 756      "options": {}
 757    },
 758    {
 759      "id": "openai/gpt-5-chat",
 760      "name": "GPT-5 Chat",
 761      "cost_per_1m_in": 1.25,
 762      "cost_per_1m_out": 10,
 763      "cost_per_1m_in_cached": 0.125,
 764      "cost_per_1m_out_cached": 0,
 765      "context_window": 128000,
 766      "default_max_tokens": 8000,
 767      "can_reason": true,
 768      "reasoning_levels": [
 769        "low",
 770        "medium",
 771        "high"
 772      ],
 773      "default_reasoning_effort": "medium",
 774      "supports_attachments": true,
 775      "options": {}
 776    },
 777    {
 778      "id": "openai/gpt-5-mini",
 779      "name": "GPT-5 mini",
 780      "cost_per_1m_in": 0.25,
 781      "cost_per_1m_out": 2,
 782      "cost_per_1m_in_cached": 0.03,
 783      "cost_per_1m_out_cached": 0,
 784      "context_window": 400000,
 785      "default_max_tokens": 8000,
 786      "can_reason": true,
 787      "reasoning_levels": [
 788        "low",
 789        "medium",
 790        "high"
 791      ],
 792      "default_reasoning_effort": "medium",
 793      "supports_attachments": true,
 794      "options": {}
 795    },
 796    {
 797      "id": "openai/gpt-5-nano",
 798      "name": "GPT-5 nano",
  799      "cost_per_1m_in": 0.05,
  800      "cost_per_1m_out": 0.4,
 801      "cost_per_1m_in_cached": 0.01,
 802      "cost_per_1m_out_cached": 0,
 803      "context_window": 400000,
 804      "default_max_tokens": 8000,
 805      "can_reason": true,
 806      "reasoning_levels": [
 807        "low",
 808        "medium",
 809        "high"
 810      ],
 811      "default_reasoning_effort": "medium",
 812      "supports_attachments": true,
 813      "options": {}
 814    },
 815    {
 816      "id": "openai/gpt-5-pro",
 817      "name": "GPT-5 pro",
 818      "cost_per_1m_in": 15,
 819      "cost_per_1m_out": 120,
 820      "cost_per_1m_in_cached": 0,
 821      "cost_per_1m_out_cached": 0,
 822      "context_window": 400000,
 823      "default_max_tokens": 8000,
 824      "can_reason": true,
 825      "reasoning_levels": [
 826        "low",
 827        "medium",
 828        "high"
 829      ],
 830      "default_reasoning_effort": "medium",
 831      "supports_attachments": true,
 832      "options": {}
 833    },
 834    {
 835      "id": "openai/gpt-5-codex",
 836      "name": "GPT-5-Codex",
 837      "cost_per_1m_in": 1.25,
 838      "cost_per_1m_out": 10,
 839      "cost_per_1m_in_cached": 0.13,
 840      "cost_per_1m_out_cached": 0,
 841      "context_window": 400000,
 842      "default_max_tokens": 8000,
 843      "can_reason": true,
 844      "reasoning_levels": [
 845        "low",
 846        "medium",
 847        "high"
 848      ],
 849      "default_reasoning_effort": "medium",
 850      "supports_attachments": true,
 851      "options": {}
 852    },
 853    {
 854      "id": "openai/gpt-5.1-codex-mini",
 855      "name": "GPT-5.1 Codex mini",
 856      "cost_per_1m_in": 0.25,
 857      "cost_per_1m_out": 2,
  858      "cost_per_1m_in_cached": 0.025,
 859      "cost_per_1m_out_cached": 0,
 860      "context_window": 400000,
 861      "default_max_tokens": 8000,
 862      "can_reason": true,
 863      "reasoning_levels": [
 864        "low",
 865        "medium",
 866        "high"
 867      ],
 868      "default_reasoning_effort": "medium",
 869      "supports_attachments": true,
 870      "options": {}
 871    },
 872    {
 873      "id": "openai/gpt-5.1-instant",
 874      "name": "GPT-5.1 Instant",
 875      "cost_per_1m_in": 1.25,
 876      "cost_per_1m_out": 10,
 877      "cost_per_1m_in_cached": 0.13,
 878      "cost_per_1m_out_cached": 0,
 879      "context_window": 128000,
 880      "default_max_tokens": 8000,
 881      "can_reason": true,
 882      "reasoning_levels": [
 883        "low",
 884        "medium",
 885        "high"
 886      ],
 887      "default_reasoning_effort": "medium",
 888      "supports_attachments": true,
 889      "options": {}
 890    },
 891    {
 892      "id": "openai/gpt-5.1-codex",
 893      "name": "GPT-5.1-Codex",
 894      "cost_per_1m_in": 1.25,
 895      "cost_per_1m_out": 10,
 896      "cost_per_1m_in_cached": 0.13,
 897      "cost_per_1m_out_cached": 0,
 898      "context_window": 400000,
 899      "default_max_tokens": 8000,
 900      "can_reason": true,
 901      "reasoning_levels": [
 902        "low",
 903        "medium",
 904        "high"
 905      ],
 906      "default_reasoning_effort": "medium",
 907      "supports_attachments": true,
 908      "options": {}
 909    },
 910    {
 911      "id": "openai/gpt-5.2-chat",
 912      "name": "GPT-5.2 Chat",
 913      "cost_per_1m_in": 1.75,
 914      "cost_per_1m_out": 14,
 915      "cost_per_1m_in_cached": 0.175,
 916      "cost_per_1m_out_cached": 0,
 917      "context_window": 128000,
 918      "default_max_tokens": 8000,
 919      "can_reason": true,
 920      "reasoning_levels": [
 921        "low",
 922        "medium",
 923        "high"
 924      ],
 925      "default_reasoning_effort": "medium",
 926      "supports_attachments": true,
 927      "options": {}
 928    },
 929    {
 930      "id": "openai/gpt-5.2-codex",
 931      "name": "GPT-5.2-Codex",
 932      "cost_per_1m_in": 1.75,
 933      "cost_per_1m_out": 14,
 934      "cost_per_1m_in_cached": 0.175,
 935      "cost_per_1m_out_cached": 0,
 936      "context_window": 400000,
 937      "default_max_tokens": 8000,
 938      "can_reason": true,
 939      "reasoning_levels": [
 940        "low",
 941        "medium",
 942        "high"
 943      ],
 944      "default_reasoning_effort": "medium",
 945      "supports_attachments": true,
 946      "options": {}
 947    },
 948    {
 949      "id": "google/gemini-2.5-flash",
 950      "name": "Gemini 2.5 Flash",
 951      "cost_per_1m_in": 0.3,
 952      "cost_per_1m_out": 2.5,
 953      "cost_per_1m_in_cached": 0,
 954      "cost_per_1m_out_cached": 0,
 955      "context_window": 1000000,
 956      "default_max_tokens": 8000,
 957      "can_reason": true,
 958      "reasoning_levels": [
 959        "low",
 960        "medium",
 961        "high"
 962      ],
 963      "default_reasoning_effort": "medium",
 964      "supports_attachments": false,
 965      "options": {}
 966    },
 967    {
 968      "id": "google/gemini-2.5-flash-lite",
 969      "name": "Gemini 2.5 Flash Lite",
  970      "cost_per_1m_in": 0.1,
  971      "cost_per_1m_out": 0.4,
 972      "cost_per_1m_in_cached": 0.01,
 973      "cost_per_1m_out_cached": 0,
 974      "context_window": 1048576,
 975      "default_max_tokens": 8000,
 976      "can_reason": true,
 977      "reasoning_levels": [
 978        "low",
 979        "medium",
 980        "high"
 981      ],
 982      "default_reasoning_effort": "medium",
 983      "supports_attachments": true,
 984      "options": {}
 985    },
 986    {
 987      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 988      "name": "Gemini 2.5 Flash Lite Preview 09-2025",
  989      "cost_per_1m_in": 0.1,
  990      "cost_per_1m_out": 0.4,
 991      "cost_per_1m_in_cached": 0.01,
 992      "cost_per_1m_out_cached": 0,
 993      "context_window": 1048576,
 994      "default_max_tokens": 8000,
 995      "can_reason": true,
 996      "reasoning_levels": [
 997        "low",
 998        "medium",
 999        "high"
1000      ],
1001      "default_reasoning_effort": "medium",
1002      "supports_attachments": true,
1003      "options": {}
1004    },
1005    {
1006      "id": "google/gemini-2.5-flash-preview-09-2025",
1007      "name": "Gemini 2.5 Flash Preview 09-2025",
1008      "cost_per_1m_in": 0.3,
1009      "cost_per_1m_out": 2.5,
1010      "cost_per_1m_in_cached": 0.03,
1011      "cost_per_1m_out_cached": 0,
1012      "context_window": 1000000,
1013      "default_max_tokens": 8000,
1014      "can_reason": true,
1015      "reasoning_levels": [
1016        "low",
1017        "medium",
1018        "high"
1019      ],
1020      "default_reasoning_effort": "medium",
1021      "supports_attachments": true,
1022      "options": {}
1023    },
1024    {
1025      "id": "google/gemini-2.5-pro",
1026      "name": "Gemini 2.5 Pro",
1027      "cost_per_1m_in": 1.25,
1028      "cost_per_1m_out": 10,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0,
1031      "context_window": 1048576,
1032      "default_max_tokens": 8000,
1033      "can_reason": true,
1034      "reasoning_levels": [
1035        "low",
1036        "medium",
1037        "high"
1038      ],
1039      "default_reasoning_effort": "medium",
1040      "supports_attachments": false,
1041      "options": {}
1042    },
1043    {
1044      "id": "google/gemini-3-flash",
1045      "name": "Gemini 3 Flash",
1046      "cost_per_1m_in": 0.5,
1047      "cost_per_1m_out": 3,
 1048      "cost_per_1m_in_cached": 0.05,
1049      "cost_per_1m_out_cached": 0,
1050      "context_window": 1000000,
1051      "default_max_tokens": 8000,
1052      "can_reason": true,
1053      "reasoning_levels": [
1054        "low",
1055        "medium",
1056        "high"
1057      ],
1058      "default_reasoning_effort": "medium",
1059      "supports_attachments": true,
1060      "options": {}
1061    },
1062    {
1063      "id": "google/gemini-3-pro-preview",
1064      "name": "Gemini 3 Pro Preview",
1065      "cost_per_1m_in": 2,
1066      "cost_per_1m_out": 12,
1067      "cost_per_1m_in_cached": 0.19999999999999998,
1068      "cost_per_1m_out_cached": 0,
1069      "context_window": 1000000,
1070      "default_max_tokens": 8000,
1071      "can_reason": true,
1072      "reasoning_levels": [
1073        "low",
1074        "medium",
1075        "high"
1076      ],
1077      "default_reasoning_effort": "medium",
1078      "supports_attachments": true,
1079      "options": {}
1080    },
1081    {
1082      "id": "google/gemini-3.1-pro-preview",
1083      "name": "Gemini 3.1 Pro Preview",
1084      "cost_per_1m_in": 2,
1085      "cost_per_1m_out": 12,
1086      "cost_per_1m_in_cached": 0.19999999999999998,
1087      "cost_per_1m_out_cached": 0,
1088      "context_window": 1000000,
1089      "default_max_tokens": 8000,
1090      "can_reason": true,
1091      "reasoning_levels": [
1092        "low",
1093        "medium",
1094        "high"
1095      ],
1096      "default_reasoning_effort": "medium",
1097      "supports_attachments": true,
1098      "options": {}
1099    },
1100    {
1101      "id": "xai/grok-2-vision",
1102      "name": "Grok 2 Vision",
1103      "cost_per_1m_in": 2,
1104      "cost_per_1m_out": 10,
1105      "cost_per_1m_in_cached": 0,
1106      "cost_per_1m_out_cached": 0,
1107      "context_window": 32768,
1108      "default_max_tokens": 8000,
1109      "can_reason": false,
1110      "supports_attachments": true,
1111      "options": {}
1112    },
1113    {
1114      "id": "xai/grok-3",
1115      "name": "Grok 3 Beta",
1116      "cost_per_1m_in": 3,
1117      "cost_per_1m_out": 15,
1118      "cost_per_1m_in_cached": 0,
1119      "cost_per_1m_out_cached": 0,
1120      "context_window": 131072,
1121      "default_max_tokens": 8000,
1122      "can_reason": false,
1123      "supports_attachments": false,
1124      "options": {}
1125    },
1126    {
1127      "id": "xai/grok-3-fast",
1128      "name": "Grok 3 Fast Beta",
1129      "cost_per_1m_in": 5,
1130      "cost_per_1m_out": 25,
1131      "cost_per_1m_in_cached": 0,
1132      "cost_per_1m_out_cached": 0,
1133      "context_window": 131072,
1134      "default_max_tokens": 8000,
1135      "can_reason": false,
1136      "supports_attachments": false,
1137      "options": {}
1138    },
1139    {
1140      "id": "xai/grok-3-mini",
1141      "name": "Grok 3 Mini Beta",
1142      "cost_per_1m_in": 0.3,
1143      "cost_per_1m_out": 0.5,
1144      "cost_per_1m_in_cached": 0,
1145      "cost_per_1m_out_cached": 0,
1146      "context_window": 131072,
1147      "default_max_tokens": 8000,
1148      "can_reason": false,
1149      "supports_attachments": false,
1150      "options": {}
1151    },
1152    {
1153      "id": "xai/grok-3-mini-fast",
1154      "name": "Grok 3 Mini Fast Beta",
1155      "cost_per_1m_in": 0.6,
1156      "cost_per_1m_out": 4,
1157      "cost_per_1m_in_cached": 0,
1158      "cost_per_1m_out_cached": 0,
1159      "context_window": 131072,
1160      "default_max_tokens": 8000,
1161      "can_reason": false,
1162      "supports_attachments": false,
1163      "options": {}
1164    },
1165    {
1166      "id": "xai/grok-4",
1167      "name": "Grok 4",
1168      "cost_per_1m_in": 3,
1169      "cost_per_1m_out": 15,
1170      "cost_per_1m_in_cached": 0,
1171      "cost_per_1m_out_cached": 0,
1172      "context_window": 256000,
1173      "default_max_tokens": 8000,
1174      "can_reason": true,
1175      "reasoning_levels": [
1176        "low",
1177        "medium",
1178        "high"
1179      ],
1180      "default_reasoning_effort": "medium",
1181      "supports_attachments": true,
1182      "options": {}
1183    },
1184    {
1185      "id": "xai/grok-4-fast-non-reasoning",
1186      "name": "Grok 4 Fast Non-Reasoning",
1187      "cost_per_1m_in": 0.19999999999999998,
1188      "cost_per_1m_out": 0.5,
1189      "cost_per_1m_in_cached": 0.049999999999999996,
1190      "cost_per_1m_out_cached": 0,
1191      "context_window": 2000000,
1192      "default_max_tokens": 8000,
1193      "can_reason": false,
1194      "supports_attachments": false,
1195      "options": {}
1196    },
1197    {
1198      "id": "xai/grok-4-fast-reasoning",
1199      "name": "Grok 4 Fast Reasoning",
1200      "cost_per_1m_in": 0.19999999999999998,
1201      "cost_per_1m_out": 0.5,
1202      "cost_per_1m_in_cached": 0.049999999999999996,
1203      "cost_per_1m_out_cached": 0,
1204      "context_window": 2000000,
1205      "default_max_tokens": 8000,
1206      "can_reason": true,
1207      "reasoning_levels": [
1208        "low",
1209        "medium",
1210        "high"
1211      ],
1212      "default_reasoning_effort": "medium",
1213      "supports_attachments": false,
1214      "options": {}
1215    },
1216    {
1217      "id": "xai/grok-4.1-fast-non-reasoning",
1218      "name": "Grok 4.1 Fast Non-Reasoning",
1219      "cost_per_1m_in": 0.19999999999999998,
1220      "cost_per_1m_out": 0.5,
1221      "cost_per_1m_in_cached": 0.049999999999999996,
1222      "cost_per_1m_out_cached": 0,
1223      "context_window": 2000000,
1224      "default_max_tokens": 8000,
1225      "can_reason": false,
1226      "supports_attachments": false,
1227      "options": {}
1228    },
1229    {
1230      "id": "xai/grok-4.1-fast-reasoning",
1231      "name": "Grok 4.1 Fast Reasoning",
1232      "cost_per_1m_in": 0.19999999999999998,
1233      "cost_per_1m_out": 0.5,
1234      "cost_per_1m_in_cached": 0.049999999999999996,
1235      "cost_per_1m_out_cached": 0,
1236      "context_window": 2000000,
1237      "default_max_tokens": 8000,
1238      "can_reason": true,
1239      "reasoning_levels": [
1240        "low",
1241        "medium",
1242        "high"
1243      ],
1244      "default_reasoning_effort": "medium",
1245      "supports_attachments": false,
1246      "options": {}
1247    },
1248    {
1249      "id": "xai/grok-code-fast-1",
1250      "name": "Grok Code Fast 1",
1251      "cost_per_1m_in": 0.19999999999999998,
1252      "cost_per_1m_out": 1.5,
1253      "cost_per_1m_in_cached": 0.02,
1254      "cost_per_1m_out_cached": 0,
1255      "context_window": 256000,
1256      "default_max_tokens": 8000,
1257      "can_reason": true,
1258      "reasoning_levels": [
1259        "low",
1260        "medium",
1261        "high"
1262      ],
1263      "default_reasoning_effort": "medium",
1264      "supports_attachments": false,
1265      "options": {}
1266    },
1267    {
1268      "id": "prime-intellect/intellect-3",
1269      "name": "INTELLECT 3",
1270      "cost_per_1m_in": 0.19999999999999998,
1271      "cost_per_1m_out": 1.1,
1272      "cost_per_1m_in_cached": 0,
1273      "cost_per_1m_out_cached": 0,
1274      "context_window": 131072,
1275      "default_max_tokens": 8000,
1276      "can_reason": true,
1277      "reasoning_levels": [
1278        "low",
1279        "medium",
1280        "high"
1281      ],
1282      "default_reasoning_effort": "medium",
1283      "supports_attachments": false,
1284      "options": {}
1285    },
1286    {
1287      "id": "moonshotai/kimi-k2",
1288      "name": "Kimi K2",
1289      "cost_per_1m_in": 0.5,
1290      "cost_per_1m_out": 2,
1291      "cost_per_1m_in_cached": 0,
1292      "cost_per_1m_out_cached": 0,
1293      "context_window": 131072,
1294      "default_max_tokens": 8000,
1295      "can_reason": false,
1296      "supports_attachments": false,
1297      "options": {}
1298    },
1299    {
1300      "id": "moonshotai/kimi-k2-thinking",
1301      "name": "Kimi K2 Thinking",
1302      "cost_per_1m_in": 0.47,
1303      "cost_per_1m_out": 2,
1304      "cost_per_1m_in_cached": 0.14100000000000001,
1305      "cost_per_1m_out_cached": 0,
1306      "context_window": 216144,
1307      "default_max_tokens": 8000,
1308      "can_reason": true,
1309      "reasoning_levels": [
1310        "low",
1311        "medium",
1312        "high"
1313      ],
1314      "default_reasoning_effort": "medium",
1315      "supports_attachments": false,
1316      "options": {}
1317    },
1318    {
1319      "id": "moonshotai/kimi-k2-thinking-turbo",
1320      "name": "Kimi K2 Thinking Turbo",
1321      "cost_per_1m_in": 1.15,
1322      "cost_per_1m_out": 8,
1323      "cost_per_1m_in_cached": 0.15,
1324      "cost_per_1m_out_cached": 0,
1325      "context_window": 262114,
1326      "default_max_tokens": 8000,
1327      "can_reason": true,
1328      "reasoning_levels": [
1329        "low",
1330        "medium",
1331        "high"
1332      ],
1333      "default_reasoning_effort": "medium",
1334      "supports_attachments": false,
1335      "options": {}
1336    },
1337    {
1338      "id": "moonshotai/kimi-k2-turbo",
1339      "name": "Kimi K2 Turbo",
1340      "cost_per_1m_in": 2.4,
1341      "cost_per_1m_out": 10,
1342      "cost_per_1m_in_cached": 0,
1343      "cost_per_1m_out_cached": 0,
1344      "context_window": 256000,
1345      "default_max_tokens": 8000,
1346      "can_reason": false,
1347      "supports_attachments": false,
1348      "options": {}
1349    },
1350    {
1351      "id": "moonshotai/kimi-k2.5",
1352      "name": "Kimi K2.5",
1353      "cost_per_1m_in": 0.5,
1354      "cost_per_1m_out": 2.8,
1355      "cost_per_1m_in_cached": 0,
1356      "cost_per_1m_out_cached": 0,
1357      "context_window": 256000,
1358      "default_max_tokens": 8000,
1359      "can_reason": true,
1360      "reasoning_levels": [
1361        "low",
1362        "medium",
1363        "high"
1364      ],
1365      "default_reasoning_effort": "medium",
1366      "supports_attachments": true,
1367      "options": {}
1368    },
1369    {
1370      "id": "meta/llama-3.1-70b",
1371      "name": "Llama 3.1 70B Instruct",
1372      "cost_per_1m_in": 0.39999999999999997,
1373      "cost_per_1m_out": 0.39999999999999997,
1374      "cost_per_1m_in_cached": 0,
1375      "cost_per_1m_out_cached": 0,
1376      "context_window": 131072,
1377      "default_max_tokens": 8000,
1378      "can_reason": false,
1379      "supports_attachments": false,
1380      "options": {}
1381    },
1382    {
1383      "id": "meta/llama-3.1-8b",
1384      "name": "Llama 3.1 8B Instruct",
1385      "cost_per_1m_in": 0.03,
1386      "cost_per_1m_out": 0.049999999999999996,
1387      "cost_per_1m_in_cached": 0,
1388      "cost_per_1m_out_cached": 0,
1389      "context_window": 131072,
1390      "default_max_tokens": 8000,
1391      "can_reason": false,
1392      "supports_attachments": false,
1393      "options": {}
1394    },
1395    {
1396      "id": "meta/llama-3.2-11b",
1397      "name": "Llama 3.2 11B Vision Instruct",
1398      "cost_per_1m_in": 0.16,
1399      "cost_per_1m_out": 0.16,
1400      "cost_per_1m_in_cached": 0,
1401      "cost_per_1m_out_cached": 0,
1402      "context_window": 128000,
1403      "default_max_tokens": 8000,
1404      "can_reason": false,
1405      "supports_attachments": true,
1406      "options": {}
1407    },
1408    {
1409      "id": "meta/llama-3.2-90b",
1410      "name": "Llama 3.2 90B Vision Instruct",
1411      "cost_per_1m_in": 0.72,
1412      "cost_per_1m_out": 0.72,
1413      "cost_per_1m_in_cached": 0,
1414      "cost_per_1m_out_cached": 0,
1415      "context_window": 128000,
1416      "default_max_tokens": 8000,
1417      "can_reason": false,
1418      "supports_attachments": true,
1419      "options": {}
1420    },
1421    {
1422      "id": "meta/llama-3.3-70b",
1423      "name": "Llama 3.3 70B Instruct",
1424      "cost_per_1m_in": 0.72,
1425      "cost_per_1m_out": 0.72,
1426      "cost_per_1m_in_cached": 0,
1427      "cost_per_1m_out_cached": 0,
1428      "context_window": 128000,
1429      "default_max_tokens": 8000,
1430      "can_reason": false,
1431      "supports_attachments": false,
1432      "options": {}
1433    },
1434    {
1435      "id": "meta/llama-4-maverick",
1436      "name": "Llama 4 Maverick 17B Instruct",
1437      "cost_per_1m_in": 0.15,
1438      "cost_per_1m_out": 0.6,
1439      "cost_per_1m_in_cached": 0,
1440      "cost_per_1m_out_cached": 0,
1441      "context_window": 131072,
1442      "default_max_tokens": 8000,
1443      "can_reason": false,
1444      "supports_attachments": true,
1445      "options": {}
1446    },
1447    {
1448      "id": "meta/llama-4-scout",
1449      "name": "Llama 4 Scout 17B Instruct",
1450      "cost_per_1m_in": 0.08,
1451      "cost_per_1m_out": 0.3,
1452      "cost_per_1m_in_cached": 0,
1453      "cost_per_1m_out_cached": 0,
1454      "context_window": 131072,
1455      "default_max_tokens": 8000,
1456      "can_reason": false,
1457      "supports_attachments": true,
1458      "options": {}
1459    },
1460    {
1461      "id": "meituan/longcat-flash-chat",
1462      "name": "LongCat Flash Chat",
1463      "cost_per_1m_in": 0,
1464      "cost_per_1m_out": 0,
1465      "cost_per_1m_in_cached": 0,
1466      "cost_per_1m_out_cached": 0,
1467      "context_window": 128000,
1468      "default_max_tokens": 8000,
1469      "can_reason": false,
1470      "supports_attachments": false,
1471      "options": {}
1472    },
1473    {
1474      "id": "meituan/longcat-flash-thinking",
1475      "name": "LongCat Flash Thinking",
1476      "cost_per_1m_in": 0.15,
1477      "cost_per_1m_out": 1.5,
1478      "cost_per_1m_in_cached": 0,
1479      "cost_per_1m_out_cached": 0,
1480      "context_window": 128000,
1481      "default_max_tokens": 8000,
1482      "can_reason": true,
1483      "reasoning_levels": [
1484        "low",
1485        "medium",
1486        "high"
1487      ],
1488      "default_reasoning_effort": "medium",
1489      "supports_attachments": false,
1490      "options": {}
1491    },
1492    {
1493      "id": "inception/mercury-coder-small",
1494      "name": "Mercury Coder Small Beta",
1495      "cost_per_1m_in": 0.25,
1496      "cost_per_1m_out": 1,
1497      "cost_per_1m_in_cached": 0,
1498      "cost_per_1m_out_cached": 0,
1499      "context_window": 32000,
1500      "default_max_tokens": 8000,
1501      "can_reason": false,
1502      "supports_attachments": false,
1503      "options": {}
1504    },
1505    {
1506      "id": "xiaomi/mimo-v2-flash",
1507      "name": "MiMo V2 Flash",
1508      "cost_per_1m_in": 0.09,
1509      "cost_per_1m_out": 0.29,
1510      "cost_per_1m_in_cached": 0,
1511      "cost_per_1m_out_cached": 0,
1512      "context_window": 262144,
1513      "default_max_tokens": 8000,
1514      "can_reason": true,
1515      "reasoning_levels": [
1516        "low",
1517        "medium",
1518        "high"
1519      ],
1520      "default_reasoning_effort": "medium",
1521      "supports_attachments": false,
1522      "options": {}
1523    },
1524    {
1525      "id": "minimax/minimax-m2",
1526      "name": "MiniMax M2",
1527      "cost_per_1m_in": 0.3,
1528      "cost_per_1m_out": 1.2,
1529      "cost_per_1m_in_cached": 0.03,
1530      "cost_per_1m_out_cached": 0.375,
1531      "context_window": 205000,
1532      "default_max_tokens": 8000,
1533      "can_reason": true,
1534      "reasoning_levels": [
1535        "low",
1536        "medium",
1537        "high"
1538      ],
1539      "default_reasoning_effort": "medium",
1540      "supports_attachments": false,
1541      "options": {}
1542    },
1543    {
1544      "id": "minimax/minimax-m2.1",
1545      "name": "MiniMax M2.1",
1546      "cost_per_1m_in": 0.3,
1547      "cost_per_1m_out": 1.2,
1548      "cost_per_1m_in_cached": 0.15,
1549      "cost_per_1m_out_cached": 0,
1550      "context_window": 204800,
1551      "default_max_tokens": 8000,
1552      "can_reason": true,
1553      "reasoning_levels": [
1554        "low",
1555        "medium",
1556        "high"
1557      ],
1558      "default_reasoning_effort": "medium",
1559      "supports_attachments": false,
1560      "options": {}
1561    },
1562    {
1563      "id": "minimax/minimax-m2.1-lightning",
1564      "name": "MiniMax M2.1 Lightning",
1565      "cost_per_1m_in": 0.3,
1566      "cost_per_1m_out": 2.4,
1567      "cost_per_1m_in_cached": 0.03,
1568      "cost_per_1m_out_cached": 0.375,
1569      "context_window": 204800,
1570      "default_max_tokens": 8000,
1571      "can_reason": true,
1572      "reasoning_levels": [
1573        "low",
1574        "medium",
1575        "high"
1576      ],
1577      "default_reasoning_effort": "medium",
1578      "supports_attachments": false,
1579      "options": {}
1580    },
1581    {
1582      "id": "minimax/minimax-m2.5",
1583      "name": "MiniMax M2.5",
1584      "cost_per_1m_in": 0.3,
1585      "cost_per_1m_out": 1.2,
1586      "cost_per_1m_in_cached": 0.03,
1587      "cost_per_1m_out_cached": 0.375,
1588      "context_window": 204800,
1589      "default_max_tokens": 8000,
1590      "can_reason": true,
1591      "reasoning_levels": [
1592        "low",
1593        "medium",
1594        "high"
1595      ],
1596      "default_reasoning_effort": "medium",
1597      "supports_attachments": false,
1598      "options": {}
1599    },
1600    {
1601      "id": "mistral/ministral-3b",
1602      "name": "Ministral 3B",
1603      "cost_per_1m_in": 0.04,
1604      "cost_per_1m_out": 0.04,
1605      "cost_per_1m_in_cached": 0,
1606      "cost_per_1m_out_cached": 0,
1607      "context_window": 128000,
1608      "default_max_tokens": 4000,
1609      "can_reason": false,
1610      "supports_attachments": false,
1611      "options": {}
1612    },
1613    {
1614      "id": "mistral/ministral-8b",
1615      "name": "Ministral 8B",
1616      "cost_per_1m_in": 0.09999999999999999,
1617      "cost_per_1m_out": 0.09999999999999999,
1618      "cost_per_1m_in_cached": 0,
1619      "cost_per_1m_out_cached": 0,
1620      "context_window": 128000,
1621      "default_max_tokens": 4000,
1622      "can_reason": false,
1623      "supports_attachments": false,
1624      "options": {}
1625    },
1626    {
1627      "id": "mistral/codestral",
1628      "name": "Mistral Codestral",
1629      "cost_per_1m_in": 0.3,
1630      "cost_per_1m_out": 0.8999999999999999,
1631      "cost_per_1m_in_cached": 0,
1632      "cost_per_1m_out_cached": 0,
1633      "context_window": 128000,
1634      "default_max_tokens": 4000,
1635      "can_reason": false,
1636      "supports_attachments": false,
1637      "options": {}
1638    },
1639    {
1640      "id": "mistral/mistral-medium",
1641      "name": "Mistral Medium 3.1",
1642      "cost_per_1m_in": 0.39999999999999997,
1643      "cost_per_1m_out": 2,
1644      "cost_per_1m_in_cached": 0,
1645      "cost_per_1m_out_cached": 0,
1646      "context_window": 128000,
1647      "default_max_tokens": 8000,
1648      "can_reason": false,
1649      "supports_attachments": true,
1650      "options": {}
1651    },
1652    {
1653      "id": "mistral/mistral-small",
1654      "name": "Mistral Small",
1655      "cost_per_1m_in": 0.09999999999999999,
1656      "cost_per_1m_out": 0.3,
1657      "cost_per_1m_in_cached": 0,
1658      "cost_per_1m_out_cached": 0,
1659      "context_window": 32000,
1660      "default_max_tokens": 4000,
1661      "can_reason": false,
1662      "supports_attachments": true,
1663      "options": {}
1664    },
1665    {
1666      "id": "nvidia/nemotron-nano-12b-v2-vl",
1667      "name": "Nvidia Nemotron Nano 12B V2 VL",
1668      "cost_per_1m_in": 0.19999999999999998,
1669      "cost_per_1m_out": 0.6,
1670      "cost_per_1m_in_cached": 0,
1671      "cost_per_1m_out_cached": 0,
1672      "context_window": 131072,
1673      "default_max_tokens": 8000,
1674      "can_reason": true,
1675      "reasoning_levels": [
1676        "low",
1677        "medium",
1678        "high"
1679      ],
1680      "default_reasoning_effort": "medium",
1681      "supports_attachments": true,
1682      "options": {}
1683    },
1684    {
1685      "id": "nvidia/nemotron-nano-9b-v2",
1686      "name": "Nvidia Nemotron Nano 9B V2",
1687      "cost_per_1m_in": 0.04,
1688      "cost_per_1m_out": 0.16,
1689      "cost_per_1m_in_cached": 0,
1690      "cost_per_1m_out_cached": 0,
1691      "context_window": 131072,
1692      "default_max_tokens": 8000,
1693      "can_reason": true,
1694      "reasoning_levels": [
1695        "low",
1696        "medium",
1697        "high"
1698      ],
1699      "default_reasoning_effort": "medium",
1700      "supports_attachments": false,
1701      "options": {}
1702    },
1703    {
1704      "id": "mistral/pixtral-12b",
1705      "name": "Pixtral 12B 2409",
1706      "cost_per_1m_in": 0.15,
1707      "cost_per_1m_out": 0.15,
1708      "cost_per_1m_in_cached": 0,
1709      "cost_per_1m_out_cached": 0,
1710      "context_window": 128000,
1711      "default_max_tokens": 4000,
1712      "can_reason": false,
1713      "supports_attachments": true,
1714      "options": {}
1715    },
1716    {
1717      "id": "mistral/pixtral-large",
1718      "name": "Pixtral Large",
1719      "cost_per_1m_in": 2,
1720      "cost_per_1m_out": 6,
1721      "cost_per_1m_in_cached": 0,
1722      "cost_per_1m_out_cached": 0,
1723      "context_window": 128000,
1724      "default_max_tokens": 4000,
1725      "can_reason": false,
1726      "supports_attachments": true,
1727      "options": {}
1728    },
1729    {
1730      "id": "alibaba/qwen-3-32b",
1731      "name": "Qwen 3 32B",
1732      "cost_per_1m_in": 0.09999999999999999,
1733      "cost_per_1m_out": 0.3,
1734      "cost_per_1m_in_cached": 0,
1735      "cost_per_1m_out_cached": 0,
1736      "context_window": 40960,
1737      "default_max_tokens": 8000,
1738      "can_reason": true,
1739      "reasoning_levels": [
1740        "low",
1741        "medium",
1742        "high"
1743      ],
1744      "default_reasoning_effort": "medium",
1745      "supports_attachments": false,
1746      "options": {}
1747    },
1748    {
1749      "id": "alibaba/qwen3-coder-30b-a3b",
1750      "name": "Qwen 3 Coder 30B A3B Instruct",
1751      "cost_per_1m_in": 0.07,
1752      "cost_per_1m_out": 0.27,
1753      "cost_per_1m_in_cached": 0,
1754      "cost_per_1m_out_cached": 0,
1755      "context_window": 160000,
1756      "default_max_tokens": 8000,
1757      "can_reason": true,
1758      "reasoning_levels": [
1759        "low",
1760        "medium",
1761        "high"
1762      ],
1763      "default_reasoning_effort": "medium",
1764      "supports_attachments": false,
1765      "options": {}
1766    },
1767    {
1768      "id": "alibaba/qwen3-max-thinking",
1769      "name": "Qwen 3 Max Thinking",
1770      "cost_per_1m_in": 1.2,
1771      "cost_per_1m_out": 6,
1772      "cost_per_1m_in_cached": 0.24,
1773      "cost_per_1m_out_cached": 0,
1774      "context_window": 256000,
1775      "default_max_tokens": 8000,
1776      "can_reason": true,
1777      "reasoning_levels": [
1778        "low",
1779        "medium",
1780        "high"
1781      ],
1782      "default_reasoning_effort": "medium",
1783      "supports_attachments": false,
1784      "options": {}
1785    },
1786    {
1787      "id": "alibaba/qwen3.5-plus",
1788      "name": "Qwen 3.5 Plus",
1789      "cost_per_1m_in": 0.39999999999999997,
1790      "cost_per_1m_out": 2.4,
1791      "cost_per_1m_in_cached": 0.04,
1792      "cost_per_1m_out_cached": 0.5,
1793      "context_window": 1000000,
1794      "default_max_tokens": 8000,
1795      "can_reason": true,
1796      "reasoning_levels": [
1797        "low",
1798        "medium",
1799        "high"
1800      ],
1801      "default_reasoning_effort": "medium",
1802      "supports_attachments": true,
1803      "options": {}
1804    },
1805    {
1806      "id": "alibaba/qwen3-235b-a22b-thinking",
1807      "name": "Qwen3 235B A22B Thinking 2507",
1808      "cost_per_1m_in": 0.3,
1809      "cost_per_1m_out": 2.9000000000000004,
1810      "cost_per_1m_in_cached": 0,
1811      "cost_per_1m_out_cached": 0,
1812      "context_window": 262114,
1813      "default_max_tokens": 8000,
1814      "can_reason": true,
1815      "reasoning_levels": [
1816        "low",
1817        "medium",
1818        "high"
1819      ],
1820      "default_reasoning_effort": "medium",
1821      "supports_attachments": true,
1822      "options": {}
1823    },
1824    {
1825      "id": "alibaba/qwen3-coder",
1826      "name": "Qwen3 Coder 480B A35B Instruct",
1827      "cost_per_1m_in": 0.39999999999999997,
1828      "cost_per_1m_out": 1.5999999999999999,
1829      "cost_per_1m_in_cached": 0,
1830      "cost_per_1m_out_cached": 0,
1831      "context_window": 262144,
1832      "default_max_tokens": 8000,
1833      "can_reason": false,
1834      "supports_attachments": false,
1835      "options": {}
1836    },
1837    {
1838      "id": "alibaba/qwen3-coder-next",
1839      "name": "Qwen3 Coder Next",
1840      "cost_per_1m_in": 0.5,
1841      "cost_per_1m_out": 1.2,
1842      "cost_per_1m_in_cached": 0,
1843      "cost_per_1m_out_cached": 0,
1844      "context_window": 256000,
1845      "default_max_tokens": 8000,
1846      "can_reason": true,
1847      "reasoning_levels": [
1848        "low",
1849        "medium",
1850        "high"
1851      ],
1852      "default_reasoning_effort": "medium",
1853      "supports_attachments": false,
1854      "options": {}
1855    },
1856    {
1857      "id": "alibaba/qwen3-coder-plus",
1858      "name": "Qwen3 Coder Plus",
1859      "cost_per_1m_in": 1,
1860      "cost_per_1m_out": 5,
1861      "cost_per_1m_in_cached": 0.19999999999999998,
1862      "cost_per_1m_out_cached": 0,
1863      "context_window": 1000000,
1864      "default_max_tokens": 8000,
1865      "can_reason": false,
1866      "supports_attachments": false,
1867      "options": {}
1868    },
1869    {
1870      "id": "alibaba/qwen3-max-preview",
1871      "name": "Qwen3 Max Preview",
1872      "cost_per_1m_in": 1.2,
1873      "cost_per_1m_out": 6,
1874      "cost_per_1m_in_cached": 0.24,
1875      "cost_per_1m_out_cached": 0,
1876      "context_window": 262144,
1877      "default_max_tokens": 8000,
1878      "can_reason": false,
1879      "supports_attachments": false,
1880      "options": {}
1881    },
1882    {
1883      "id": "alibaba/qwen3-vl-thinking",
1884      "name": "Qwen3 VL 235B A22B Thinking",
1885      "cost_per_1m_in": 0.22,
1886      "cost_per_1m_out": 0.88,
1887      "cost_per_1m_in_cached": 0,
1888      "cost_per_1m_out_cached": 0,
1889      "context_window": 256000,
1890      "default_max_tokens": 8000,
1891      "can_reason": true,
1892      "reasoning_levels": [
1893        "low",
1894        "medium",
1895        "high"
1896      ],
1897      "default_reasoning_effort": "medium",
1898      "supports_attachments": true,
1899      "options": {}
1900    },
1901    {
1902      "id": "alibaba/qwen-3-14b",
1903      "name": "Qwen3-14B",
1904      "cost_per_1m_in": 0.06,
1905      "cost_per_1m_out": 0.24,
1906      "cost_per_1m_in_cached": 0,
1907      "cost_per_1m_out_cached": 0,
1908      "context_window": 40960,
1909      "default_max_tokens": 8000,
1910      "can_reason": true,
1911      "reasoning_levels": [
1912        "low",
1913        "medium",
1914        "high"
1915      ],
1916      "default_reasoning_effort": "medium",
1917      "supports_attachments": false,
1918      "options": {}
1919    },
1920    {
1921      "id": "alibaba/qwen-3-235b",
1922      "name": "Qwen3-235B-A22B",
1923      "cost_per_1m_in": 0.071,
1924      "cost_per_1m_out": 0.463,
1925      "cost_per_1m_in_cached": 0,
1926      "cost_per_1m_out_cached": 0,
1927      "context_window": 40960,
1928      "default_max_tokens": 8000,
1929      "can_reason": false,
1930      "supports_attachments": false,
1931      "options": {}
1932    },
1933    {
1934      "id": "alibaba/qwen-3-30b",
1935      "name": "Qwen3-30B-A3B",
1936      "cost_per_1m_in": 0.08,
1937      "cost_per_1m_out": 0.29,
1938      "cost_per_1m_in_cached": 0,
1939      "cost_per_1m_out_cached": 0,
1940      "context_window": 40960,
1941      "default_max_tokens": 8000,
1942      "can_reason": true,
1943      "reasoning_levels": [
1944        "low",
1945        "medium",
1946        "high"
1947      ],
1948      "default_reasoning_effort": "medium",
1949      "supports_attachments": false,
1950      "options": {}
1951    },
1952    {
1953      "id": "bytedance/seed-1.6",
1954      "name": "Seed 1.6",
1955      "cost_per_1m_in": 0.25,
1956      "cost_per_1m_out": 2,
1957      "cost_per_1m_in_cached": 0.049999999999999996,
1958      "cost_per_1m_out_cached": 0,
1959      "context_window": 256000,
1960      "default_max_tokens": 8000,
1961      "can_reason": true,
1962      "reasoning_levels": [
1963        "low",
1964        "medium",
1965        "high"
1966      ],
1967      "default_reasoning_effort": "medium",
1968      "supports_attachments": false,
1969      "options": {}
1970    },
1971    {
1972      "id": "perplexity/sonar",
1973      "name": "Sonar",
1974      "cost_per_1m_in": 1,
1975      "cost_per_1m_out": 1,
1976      "cost_per_1m_in_cached": 0,
1977      "cost_per_1m_out_cached": 0,
1978      "context_window": 127000,
1979      "default_max_tokens": 8000,
1980      "can_reason": false,
1981      "supports_attachments": true,
1982      "options": {}
1983    },
1984    {
1985      "id": "perplexity/sonar-pro",
1986      "name": "Sonar Pro",
1987      "cost_per_1m_in": 3,
1988      "cost_per_1m_out": 15,
1989      "cost_per_1m_in_cached": 0,
1990      "cost_per_1m_out_cached": 0,
1991      "context_window": 200000,
1992      "default_max_tokens": 8000,
1993      "can_reason": false,
1994      "supports_attachments": true,
1995      "options": {}
1996    },
1997    {
1998      "id": "arcee-ai/trinity-large-preview",
1999      "name": "Trinity Large Preview",
2000      "cost_per_1m_in": 0.25,
2001      "cost_per_1m_out": 1,
2002      "cost_per_1m_in_cached": 0,
2003      "cost_per_1m_out_cached": 0,
2004      "context_window": 131000,
2005      "default_max_tokens": 8000,
2006      "can_reason": false,
2007      "supports_attachments": false,
2008      "options": {}
2009    },
2010    {
2011      "id": "openai/gpt-oss-120b",
2012      "name": "gpt-oss-120b",
2013      "cost_per_1m_in": 0.09999999999999999,
2014      "cost_per_1m_out": 0.5,
2015      "cost_per_1m_in_cached": 0,
2016      "cost_per_1m_out_cached": 0,
2017      "context_window": 131072,
2018      "default_max_tokens": 8000,
2019      "can_reason": true,
2020      "reasoning_levels": [
2021        "low",
2022        "medium",
2023        "high"
2024      ],
2025      "default_reasoning_effort": "medium",
2026      "supports_attachments": false,
2027      "options": {}
2028    },
2029    {
2030      "id": "openai/gpt-oss-20b",
2031      "name": "gpt-oss-20b",
2032      "cost_per_1m_in": 0.07,
2033      "cost_per_1m_out": 0.3,
2034      "cost_per_1m_in_cached": 0,
2035      "cost_per_1m_out_cached": 0,
2036      "context_window": 128000,
2037      "default_max_tokens": 8000,
2038      "can_reason": true,
2039      "reasoning_levels": [
2040        "low",
2041        "medium",
2042        "high"
2043      ],
2044      "default_reasoning_effort": "medium",
2045      "supports_attachments": false,
2046      "options": {}
2047    },
2048    {
2049      "id": "openai/gpt-oss-safeguard-20b",
2050      "name": "gpt-oss-safeguard-20b",
2051      "cost_per_1m_in": 0.075,
2052      "cost_per_1m_out": 0.3,
2053      "cost_per_1m_in_cached": 0.037,
2054      "cost_per_1m_out_cached": 0,
2055      "context_window": 131072,
2056      "default_max_tokens": 8000,
2057      "can_reason": true,
2058      "reasoning_levels": [
2059        "low",
2060        "medium",
2061        "high"
2062      ],
2063      "default_reasoning_effort": "medium",
2064      "supports_attachments": false,
2065      "options": {}
2066    },
2067    {
2068      "id": "openai/o1",
2069      "name": "o1",
2070      "cost_per_1m_in": 15,
2071      "cost_per_1m_out": 60,
2072      "cost_per_1m_in_cached": 7.5,
2073      "cost_per_1m_out_cached": 0,
2074      "context_window": 200000,
2075      "default_max_tokens": 8000,
2076      "can_reason": true,
2077      "reasoning_levels": [
2078        "low",
2079        "medium",
2080        "high"
2081      ],
2082      "default_reasoning_effort": "medium",
2083      "supports_attachments": true,
2084      "options": {}
2085    },
2086    {
2087      "id": "openai/o3",
2088      "name": "o3",
2089      "cost_per_1m_in": 2,
2090      "cost_per_1m_out": 8,
2091      "cost_per_1m_in_cached": 0.5,
2092      "cost_per_1m_out_cached": 0,
2093      "context_window": 200000,
2094      "default_max_tokens": 8000,
2095      "can_reason": true,
2096      "reasoning_levels": [
2097        "low",
2098        "medium",
2099        "high"
2100      ],
2101      "default_reasoning_effort": "medium",
2102      "supports_attachments": true,
2103      "options": {}
2104    },
2105    {
2106      "id": "openai/o3-pro",
2107      "name": "o3 Pro",
2108      "cost_per_1m_in": 20,
2109      "cost_per_1m_out": 80,
2110      "cost_per_1m_in_cached": 0,
2111      "cost_per_1m_out_cached": 0,
2112      "context_window": 200000,
2113      "default_max_tokens": 8000,
2114      "can_reason": true,
2115      "reasoning_levels": [
2116        "low",
2117        "medium",
2118        "high"
2119      ],
2120      "default_reasoning_effort": "medium",
2121      "supports_attachments": true,
2122      "options": {}
2123    },
2124    {
2125      "id": "openai/o3-deep-research",
2126      "name": "o3-deep-research",
2127      "cost_per_1m_in": 10,
2128      "cost_per_1m_out": 40,
2129      "cost_per_1m_in_cached": 2.5,
2130      "cost_per_1m_out_cached": 0,
2131      "context_window": 200000,
2132      "default_max_tokens": 8000,
2133      "can_reason": true,
2134      "reasoning_levels": [
2135        "low",
2136        "medium",
2137        "high"
2138      ],
2139      "default_reasoning_effort": "medium",
2140      "supports_attachments": true,
2141      "options": {}
2142    },
2143    {
2144      "id": "openai/o3-mini",
2145      "name": "o3-mini",
2146      "cost_per_1m_in": 1.1,
2147      "cost_per_1m_out": 4.4,
2148      "cost_per_1m_in_cached": 0.55,
2149      "cost_per_1m_out_cached": 0,
2150      "context_window": 200000,
2151      "default_max_tokens": 8000,
2152      "can_reason": true,
2153      "reasoning_levels": [
2154        "low",
2155        "medium",
2156        "high"
2157      ],
2158      "default_reasoning_effort": "medium",
2159      "supports_attachments": false,
2160      "options": {}
2161    },
2162    {
2163      "id": "openai/o4-mini",
2164      "name": "o4-mini",
2165      "cost_per_1m_in": 1.1,
2166      "cost_per_1m_out": 4.4,
2167      "cost_per_1m_in_cached": 0.275,
2168      "cost_per_1m_out_cached": 0,
2169      "context_window": 200000,
2170      "default_max_tokens": 8000,
2171      "can_reason": true,
2172      "reasoning_levels": [
2173        "low",
2174        "medium",
2175        "high"
2176      ],
2177      "default_reasoning_effort": "medium",
2178      "supports_attachments": true,
2179      "options": {}
2180    },
2181    {
2182      "id": "vercel/v0-1.0-md",
2183      "name": "v0-1.0-md",
2184      "cost_per_1m_in": 3,
2185      "cost_per_1m_out": 15,
2186      "cost_per_1m_in_cached": 0,
2187      "cost_per_1m_out_cached": 0,
2188      "context_window": 128000,
2189      "default_max_tokens": 8000,
2190      "can_reason": false,
2191      "supports_attachments": true,
2192      "options": {}
2193    },
2194    {
2195      "id": "vercel/v0-1.5-md",
2196      "name": "v0-1.5-md",
2197      "cost_per_1m_in": 3,
2198      "cost_per_1m_out": 15,
2199      "cost_per_1m_in_cached": 0,
2200      "cost_per_1m_out_cached": 0,
2201      "context_window": 128000,
2202      "default_max_tokens": 8000,
2203      "can_reason": false,
2204      "supports_attachments": true,
2205      "options": {}
2206    }
2207  ],
2208  "default_headers": {
2209    "HTTP-Referer": "https://charm.land",
2210    "X-Title": "Crush"
2211  }
2212}