vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "vercel",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true,
  21      "options": {}
  22    },
  23    {
  24      "id": "anthropic/claude-3.5-haiku",
  25      "name": "Claude 3.5 Haiku",
  26      "cost_per_1m_in": 0.8,
  27      "cost_per_1m_out": 4,
  28      "cost_per_1m_in_cached": 0.08,
  29      "cost_per_1m_out_cached": 1,
  30      "context_window": 200000,
  31      "default_max_tokens": 8000,
  32      "can_reason": false,
  33      "supports_attachments": true,
  34      "options": {}
  35    },
  36    {
  37      "id": "anthropic/claude-3.5-sonnet",
  38      "name": "Claude 3.5 Sonnet",
  39      "cost_per_1m_in": 3,
  40      "cost_per_1m_out": 15,
  41      "cost_per_1m_in_cached": 0.3,
  42      "cost_per_1m_out_cached": 3.75,
  43      "context_window": 200000,
  44      "default_max_tokens": 8000,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "anthropic/claude-3.5-sonnet-20240620",
  51      "name": "Claude 3.5 Sonnet (2024-06-20)",
  52      "cost_per_1m_in": 3,
  53      "cost_per_1m_out": 15,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 200000,
  57      "default_max_tokens": 8000,
  58      "can_reason": false,
  59      "supports_attachments": true,
  60      "options": {}
  61    },
  62    {
  63      "id": "anthropic/claude-3.7-sonnet",
  64      "name": "Claude 3.7 Sonnet",
  65      "cost_per_1m_in": 3,
  66      "cost_per_1m_out": 15,
  67      "cost_per_1m_in_cached": 0.3,
  68      "cost_per_1m_out_cached": 3.75,
  69      "context_window": 200000,
  70      "default_max_tokens": 8000,
  71      "can_reason": true,
  72      "reasoning_levels": [
  73        "none",
  74        "minimal",
  75        "low",
  76        "medium",
  77        "high",
  78        "xhigh"
  79      ],
  80      "default_reasoning_effort": "medium",
  81      "supports_attachments": true,
  82      "options": {}
  83    },
  84    {
  85      "id": "anthropic/claude-haiku-4.5",
  86      "name": "Claude Haiku 4.5",
  87      "cost_per_1m_in": 1,
  88      "cost_per_1m_out": 5,
  89      "cost_per_1m_in_cached": 0.1,
  90      "cost_per_1m_out_cached": 1.25,
  91      "context_window": 200000,
  92      "default_max_tokens": 8000,
  93      "can_reason": true,
  94      "reasoning_levels": [
  95        "none",
  96        "minimal",
  97        "low",
  98        "medium",
  99        "high",
 100        "xhigh"
 101      ],
 102      "default_reasoning_effort": "medium",
 103      "supports_attachments": true,
 104      "options": {}
 105    },
 106    {
 107      "id": "anthropic/claude-opus-4",
 108      "name": "Claude Opus 4",
 109      "cost_per_1m_in": 15,
 110      "cost_per_1m_out": 75,
 111      "cost_per_1m_in_cached": 1.5,
 112      "cost_per_1m_out_cached": 18.75,
 113      "context_window": 200000,
 114      "default_max_tokens": 8000,
 115      "can_reason": true,
 116      "reasoning_levels": [
 117        "none",
 118        "minimal",
 119        "low",
 120        "medium",
 121        "high",
 122        "xhigh"
 123      ],
 124      "default_reasoning_effort": "medium",
 125      "supports_attachments": true,
 126      "options": {}
 127    },
 128    {
 129      "id": "anthropic/claude-opus-4.1",
 130      "name": "Claude Opus 4.1",
 131      "cost_per_1m_in": 15,
 132      "cost_per_1m_out": 75,
 133      "cost_per_1m_in_cached": 1.5,
 134      "cost_per_1m_out_cached": 18.75,
 135      "context_window": 200000,
 136      "default_max_tokens": 8000,
 137      "can_reason": true,
 138      "reasoning_levels": [
 139        "none",
 140        "minimal",
 141        "low",
 142        "medium",
 143        "high",
 144        "xhigh"
 145      ],
 146      "default_reasoning_effort": "medium",
 147      "supports_attachments": true,
 148      "options": {}
 149    },
 150    {
 151      "id": "anthropic/claude-opus-4.5",
 152      "name": "Claude Opus 4.5",
 153      "cost_per_1m_in": 5,
 154      "cost_per_1m_out": 25,
 155      "cost_per_1m_in_cached": 0.5,
 156      "cost_per_1m_out_cached": 6.25,
 157      "context_window": 200000,
 158      "default_max_tokens": 8000,
 159      "can_reason": true,
 160      "reasoning_levels": [
 161        "none",
 162        "minimal",
 163        "low",
 164        "medium",
 165        "high",
 166        "xhigh"
 167      ],
 168      "default_reasoning_effort": "medium",
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-sonnet-4",
 174      "name": "Claude Sonnet 4",
 175      "cost_per_1m_in": 3,
 176      "cost_per_1m_out": 15,
 177      "cost_per_1m_in_cached": 0.3,
 178      "cost_per_1m_out_cached": 3.75,
 179      "context_window": 1000000,
 180      "default_max_tokens": 8000,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "none",
 184        "minimal",
 185        "low",
 186        "medium",
 187        "high",
 188        "xhigh"
 189      ],
 190      "default_reasoning_effort": "medium",
 191      "supports_attachments": true,
 192      "options": {}
 193    },
 194    {
 195      "id": "anthropic/claude-sonnet-4.5",
 196      "name": "Claude Sonnet 4.5",
 197      "cost_per_1m_in": 3,
 198      "cost_per_1m_out": 15,
 199      "cost_per_1m_in_cached": 0.3,
 200      "cost_per_1m_out_cached": 3.75,
 201      "context_window": 1000000,
 202      "default_max_tokens": 8000,
 203      "can_reason": true,
 204      "reasoning_levels": [
 205        "none",
 206        "minimal",
 207        "low",
 208        "medium",
 209        "high",
 210        "xhigh"
 211      ],
 212      "default_reasoning_effort": "medium",
 213      "supports_attachments": true,
 214      "options": {}
 215    },
 216    {
 217      "id": "openai/codex-mini",
 218      "name": "Codex Mini",
 219      "cost_per_1m_in": 1.5,
 220      "cost_per_1m_out": 6,
 221      "cost_per_1m_in_cached": 0.375,
 222      "cost_per_1m_out_cached": 0,
 223      "context_window": 200000,
 224      "default_max_tokens": 8000,
 225      "can_reason": true,
 226      "reasoning_levels": [
 227        "low",
 228        "medium",
 229        "high"
 230      ],
 231      "default_reasoning_effort": "medium",
 232      "supports_attachments": true,
 233      "options": {}
 234    },
 235    {
 236      "id": "cohere/command-a",
 237      "name": "Command A",
 238      "cost_per_1m_in": 2.5,
 239      "cost_per_1m_out": 10,
 240      "cost_per_1m_in_cached": 0,
 241      "cost_per_1m_out_cached": 0,
 242      "context_window": 256000,
 243      "default_max_tokens": 8000,
 244      "can_reason": false,
 245      "supports_attachments": false,
 246      "options": {}
 247    },
 248    {
 249      "id": "deepseek/deepseek-v3",
 250      "name": "DeepSeek V3 0324",
 251      "cost_per_1m_in": 0.77,
 252      "cost_per_1m_out": 0.77,
 253      "cost_per_1m_in_cached": 0,
 254      "cost_per_1m_out_cached": 0,
 255      "context_window": 163840,
 256      "default_max_tokens": 8000,
 257      "can_reason": false,
 258      "supports_attachments": false,
 259      "options": {}
 260    },
 261    {
 262      "id": "deepseek/deepseek-v3.1-terminus",
 263      "name": "DeepSeek V3.1 Terminus",
 264      "cost_per_1m_in": 0.27,
 265      "cost_per_1m_out": 1,
 266      "cost_per_1m_in_cached": 0,
 267      "cost_per_1m_out_cached": 0,
 268      "context_window": 131072,
 269      "default_max_tokens": 8000,
 270      "can_reason": true,
 271      "reasoning_levels": [
 272        "low",
 273        "medium",
 274        "high"
 275      ],
 276      "default_reasoning_effort": "medium",
 277      "supports_attachments": false,
 278      "options": {}
 279    },
 280    {
 281      "id": "deepseek/deepseek-v3.2-exp",
 282      "name": "DeepSeek V3.2 Exp",
 283      "cost_per_1m_in": 0.27,
 284      "cost_per_1m_out": 0.4,
 285      "cost_per_1m_in_cached": 0,
 286      "cost_per_1m_out_cached": 0,
 287      "context_window": 163840,
 288      "default_max_tokens": 8000,
 289      "can_reason": true,
 290      "reasoning_levels": [
 291        "low",
 292        "medium",
 293        "high"
 294      ],
 295      "default_reasoning_effort": "medium",
 296      "supports_attachments": false,
 297      "options": {}
 298    },
 299    {
 300      "id": "deepseek/deepseek-v3.2-thinking",
 301      "name": "DeepSeek V3.2 Thinking",
 302      "cost_per_1m_in": 0.28,
 303      "cost_per_1m_out": 0.42,
 304      "cost_per_1m_in_cached": 0.028,
 305      "cost_per_1m_out_cached": 0,
 306      "context_window": 128000,
 307      "default_max_tokens": 8000,
 308      "can_reason": true,
 309      "reasoning_levels": [
 310        "low",
 311        "medium",
 312        "high"
 313      ],
 314      "default_reasoning_effort": "medium",
 315      "supports_attachments": false,
 316      "options": {}
 317    },
 318    {
 319      "id": "deepseek/deepseek-v3.1",
 320      "name": "DeepSeek-V3.1",
 321      "cost_per_1m_in": 0.3,
 322      "cost_per_1m_out": 1,
 323      "cost_per_1m_in_cached": 0,
 324      "cost_per_1m_out_cached": 0,
 325      "context_window": 163840,
 326      "default_max_tokens": 8000,
 327      "can_reason": true,
 328      "reasoning_levels": [
 329        "low",
 330        "medium",
 331        "high"
 332      ],
 333      "default_reasoning_effort": "medium",
 334      "supports_attachments": false,
 335      "options": {}
 336    },
 337    {
 338      "id": "mistral/devstral-2",
 339      "name": "Devstral 2",
 340      "cost_per_1m_in": 0,
 341      "cost_per_1m_out": 0,
 342      "cost_per_1m_in_cached": 0,
 343      "cost_per_1m_out_cached": 0,
 344      "context_window": 256000,
 345      "default_max_tokens": 8000,
 346      "can_reason": false,
 347      "supports_attachments": false,
 348      "options": {}
 349    },
 350    {
 351      "id": "mistral/devstral-small",
 352      "name": "Devstral Small 1.1",
 353      "cost_per_1m_in": 0.1,
 354      "cost_per_1m_out": 0.3,
 355      "cost_per_1m_in_cached": 0,
 356      "cost_per_1m_out_cached": 0,
 357      "context_window": 128000,
 358      "default_max_tokens": 8000,
 359      "can_reason": false,
 360      "supports_attachments": false,
 361      "options": {}
 362    },
 363    {
 364      "id": "mistral/devstral-small-2",
 365      "name": "Devstral Small 2",
 366      "cost_per_1m_in": 0,
 367      "cost_per_1m_out": 0,
 368      "cost_per_1m_in_cached": 0,
 369      "cost_per_1m_out_cached": 0,
 370      "context_window": 256000,
 371      "default_max_tokens": 8000,
 372      "can_reason": false,
 373      "supports_attachments": false,
 374      "options": {}
 375    },
 376    {
 377      "id": "zai/glm-4.5-air",
 378      "name": "GLM 4.5 Air",
 379      "cost_per_1m_in": 0.2,
 380      "cost_per_1m_out": 1.1,
 381      "cost_per_1m_in_cached": 0.03,
 382      "cost_per_1m_out_cached": 0,
 383      "context_window": 128000,
 384      "default_max_tokens": 8000,
 385      "can_reason": true,
 386      "reasoning_levels": [
 387        "low",
 388        "medium",
 389        "high"
 390      ],
 391      "default_reasoning_effort": "medium",
 392      "supports_attachments": false,
 393      "options": {}
 394    },
 395    {
 396      "id": "zai/glm-4.5v",
 397      "name": "GLM 4.5V",
 398      "cost_per_1m_in": 0.6,
 399      "cost_per_1m_out": 1.8,
 400      "cost_per_1m_in_cached": 0,
 401      "cost_per_1m_out_cached": 0,
 402      "context_window": 65536,
 403      "default_max_tokens": 8000,
 404      "can_reason": true,
 405      "reasoning_levels": [
 406        "low",
 407        "medium",
 408        "high"
 409      ],
 410      "default_reasoning_effort": "medium",
 411      "supports_attachments": true,
 412      "options": {}
 413    },
 414    {
 415      "id": "zai/glm-4.6",
 416      "name": "GLM 4.6",
 417      "cost_per_1m_in": 0.45,
 418      "cost_per_1m_out": 1.8,
 419      "cost_per_1m_in_cached": 0.11,
 420      "cost_per_1m_out_cached": 0,
 421      "context_window": 200000,
 422      "default_max_tokens": 8000,
 423      "can_reason": true,
 424      "reasoning_levels": [
 425        "low",
 426        "medium",
 427        "high"
 428      ],
 429      "default_reasoning_effort": "medium",
 430      "supports_attachments": false,
 431      "options": {}
 432    },
 433    {
 434      "id": "zai/glm-4.7",
 435      "name": "GLM 4.7",
 436      "cost_per_1m_in": 0.43,
 437      "cost_per_1m_out": 1.75,
 438      "cost_per_1m_in_cached": 0.08,
 439      "cost_per_1m_out_cached": 0,
 440      "context_window": 202752,
 441      "default_max_tokens": 8000,
 442      "can_reason": true,
 443      "reasoning_levels": [
 444        "low",
 445        "medium",
 446        "high"
 447      ],
 448      "default_reasoning_effort": "medium",
 449      "supports_attachments": false,
 450      "options": {}
 451    },
 452    {
 453      "id": "zai/glm-4.7-flashx",
 454      "name": "GLM 4.7 FlashX",
 455      "cost_per_1m_in": 0.06,
 456      "cost_per_1m_out": 0.4,
 457      "cost_per_1m_in_cached": 0.01,
 458      "cost_per_1m_out_cached": 0,
 459      "context_window": 200000,
 460      "default_max_tokens": 8000,
 461      "can_reason": true,
 462      "reasoning_levels": [
 463        "low",
 464        "medium",
 465        "high"
 466      ],
 467      "default_reasoning_effort": "medium",
 468      "supports_attachments": false,
 469      "options": {}
 470    },
 471    {
 472      "id": "zai/glm-4.5",
 473      "name": "GLM-4.5",
 474      "cost_per_1m_in": 0.6,
 475      "cost_per_1m_out": 2.2,
 476      "cost_per_1m_in_cached": 0,
 477      "cost_per_1m_out_cached": 0,
 478      "context_window": 131072,
 479      "default_max_tokens": 8000,
 480      "can_reason": true,
 481      "reasoning_levels": [
 482        "low",
 483        "medium",
 484        "high"
 485      ],
 486      "default_reasoning_effort": "medium",
 487      "supports_attachments": false,
 488      "options": {}
 489    },
 490    {
 491      "id": "zai/glm-4.6v",
 492      "name": "GLM-4.6V",
 493      "cost_per_1m_in": 0.3,
 494      "cost_per_1m_out": 0.9,
 495      "cost_per_1m_in_cached": 0.05,
 496      "cost_per_1m_out_cached": 0,
 497      "context_window": 128000,
 498      "default_max_tokens": 8000,
 499      "can_reason": true,
 500      "reasoning_levels": [
 501        "low",
 502        "medium",
 503        "high"
 504      ],
 505      "default_reasoning_effort": "medium",
 506      "supports_attachments": true,
 507      "options": {}
 508    },
 509    {
 510      "id": "zai/glm-4.6v-flash",
 511      "name": "GLM-4.6V-Flash",
 512      "cost_per_1m_in": 0,
 513      "cost_per_1m_out": 0,
 514      "cost_per_1m_in_cached": 0,
 515      "cost_per_1m_out_cached": 0,
 516      "context_window": 128000,
 517      "default_max_tokens": 8000,
 518      "can_reason": true,
 519      "reasoning_levels": [
 520        "low",
 521        "medium",
 522        "high"
 523      ],
 524      "default_reasoning_effort": "medium",
 525      "supports_attachments": true,
 526      "options": {}
 527    },
 528    {
 529      "id": "openai/gpt-5.1-codex-max",
 530      "name": "GPT 5.1 Codex Max",
 531      "cost_per_1m_in": 1.25,
 532      "cost_per_1m_out": 10,
 533      "cost_per_1m_in_cached": 0.125,
 534      "cost_per_1m_out_cached": 0,
 535      "context_window": 400000,
 536      "default_max_tokens": 8000,
 537      "can_reason": true,
 538      "reasoning_levels": [
 539        "low",
 540        "medium",
 541        "high"
 542      ],
 543      "default_reasoning_effort": "medium",
 544      "supports_attachments": true,
 545      "options": {}
 546    },
 547    {
 548      "id": "openai/gpt-5.1-thinking",
 549      "name": "GPT 5.1 Thinking",
 550      "cost_per_1m_in": 1.25,
 551      "cost_per_1m_out": 10,
 552      "cost_per_1m_in_cached": 0.13,
 553      "cost_per_1m_out_cached": 0,
 554      "context_window": 400000,
 555      "default_max_tokens": 8000,
 556      "can_reason": true,
 557      "reasoning_levels": [
 558        "low",
 559        "medium",
 560        "high"
 561      ],
 562      "default_reasoning_effort": "medium",
 563      "supports_attachments": true,
 564      "options": {}
 565    },
 566    {
 567      "id": "openai/gpt-5.2",
 568      "name": "GPT 5.2",
 569      "cost_per_1m_in": 1.75,
 570      "cost_per_1m_out": 14,
 571      "cost_per_1m_in_cached": 0.18,
 572      "cost_per_1m_out_cached": 0,
 573      "context_window": 400000,
 574      "default_max_tokens": 8000,
 575      "can_reason": true,
 576      "reasoning_levels": [
 577        "low",
 578        "medium",
 579        "high"
 580      ],
 581      "default_reasoning_effort": "medium",
 582      "supports_attachments": true,
 583      "options": {}
 584    },
 585    {
 586      "id": "openai/gpt-5.2-pro",
 587      "name": "GPT 5.2 Pro",
 588      "cost_per_1m_in": 21,
 589      "cost_per_1m_out": 168,
 590      "cost_per_1m_in_cached": 0,
 591      "cost_per_1m_out_cached": 0,
 592      "context_window": 400000,
 593      "default_max_tokens": 8000,
 594      "can_reason": true,
 595      "reasoning_levels": [
 596        "low",
 597        "medium",
 598        "high"
 599      ],
 600      "default_reasoning_effort": "medium",
 601      "supports_attachments": true,
 602      "options": {}
 603    },
 604    {
 605      "id": "openai/gpt-4-turbo",
 606      "name": "GPT-4 Turbo",
 607      "cost_per_1m_in": 10,
 608      "cost_per_1m_out": 30,
 609      "cost_per_1m_in_cached": 0,
 610      "cost_per_1m_out_cached": 0,
 611      "context_window": 128000,
 612      "default_max_tokens": 4096,
 613      "can_reason": false,
 614      "supports_attachments": true,
 615      "options": {}
 616    },
 617    {
 618      "id": "openai/gpt-4.1",
 619      "name": "GPT-4.1",
 620      "cost_per_1m_in": 2,
 621      "cost_per_1m_out": 8,
 622      "cost_per_1m_in_cached": 0.5,
 623      "cost_per_1m_out_cached": 0,
 624      "context_window": 1047576,
 625      "default_max_tokens": 8000,
 626      "can_reason": false,
 627      "supports_attachments": true,
 628      "options": {}
 629    },
 630    {
 631      "id": "openai/gpt-4.1-mini",
 632      "name": "GPT-4.1 mini",
 633      "cost_per_1m_in": 0.4,
 634      "cost_per_1m_out": 1.6,
 635      "cost_per_1m_in_cached": 0.1,
 636      "cost_per_1m_out_cached": 0,
 637      "context_window": 1047576,
 638      "default_max_tokens": 8000,
 639      "can_reason": false,
 640      "supports_attachments": true,
 641      "options": {}
 642    },
 643    {
 644      "id": "openai/gpt-4.1-nano",
 645      "name": "GPT-4.1 nano",
 646      "cost_per_1m_in": 0.1,
 647      "cost_per_1m_out": 0.4,
 648      "cost_per_1m_in_cached": 0.03,
 649      "cost_per_1m_out_cached": 0,
 650      "context_window": 1047576,
 651      "default_max_tokens": 8000,
 652      "can_reason": false,
 653      "supports_attachments": true,
 654      "options": {}
 655    },
 656    {
 657      "id": "openai/gpt-4o",
 658      "name": "GPT-4o",
 659      "cost_per_1m_in": 2.5,
 660      "cost_per_1m_out": 10,
 661      "cost_per_1m_in_cached": 1.25,
 662      "cost_per_1m_out_cached": 0,
 663      "context_window": 128000,
 664      "default_max_tokens": 8000,
 665      "can_reason": false,
 666      "supports_attachments": true,
 667      "options": {}
 668    },
 669    {
 670      "id": "openai/gpt-4o-mini",
 671      "name": "GPT-4o mini",
 672      "cost_per_1m_in": 0.15,
 673      "cost_per_1m_out": 0.6,
 674      "cost_per_1m_in_cached": 0.075,
 675      "cost_per_1m_out_cached": 0,
 676      "context_window": 128000,
 677      "default_max_tokens": 8000,
 678      "can_reason": false,
 679      "supports_attachments": true,
 680      "options": {}
 681    },
 682    {
 683      "id": "openai/gpt-5",
 684      "name": "GPT-5",
 685      "cost_per_1m_in": 1.25,
 686      "cost_per_1m_out": 10,
 687      "cost_per_1m_in_cached": 0.13,
 688      "cost_per_1m_out_cached": 0,
 689      "context_window": 400000,
 690      "default_max_tokens": 8000,
 691      "can_reason": true,
 692      "reasoning_levels": [
 693        "low",
 694        "medium",
 695        "high"
 696      ],
 697      "default_reasoning_effort": "medium",
 698      "supports_attachments": true,
 699      "options": {}
 700    },
 701    {
 702      "id": "openai/gpt-5-chat",
 703      "name": "GPT-5 Chat",
 704      "cost_per_1m_in": 1.25,
 705      "cost_per_1m_out": 10,
 706      "cost_per_1m_in_cached": 0.125,
 707      "cost_per_1m_out_cached": 0,
 708      "context_window": 128000,
 709      "default_max_tokens": 8000,
 710      "can_reason": true,
 711      "reasoning_levels": [
 712        "low",
 713        "medium",
 714        "high"
 715      ],
 716      "default_reasoning_effort": "medium",
 717      "supports_attachments": true,
 718      "options": {}
 719    },
 720    {
 721      "id": "openai/gpt-5-mini",
 722      "name": "GPT-5 mini",
 723      "cost_per_1m_in": 0.25,
 724      "cost_per_1m_out": 2,
 725      "cost_per_1m_in_cached": 0.03,
 726      "cost_per_1m_out_cached": 0,
 727      "context_window": 400000,
 728      "default_max_tokens": 8000,
 729      "can_reason": true,
 730      "reasoning_levels": [
 731        "low",
 732        "medium",
 733        "high"
 734      ],
 735      "default_reasoning_effort": "medium",
 736      "supports_attachments": true,
 737      "options": {}
 738    },
 739    {
 740      "id": "openai/gpt-5-nano",
 741      "name": "GPT-5 nano",
 742      "cost_per_1m_in": 0.05,
 743      "cost_per_1m_out": 0.4,
 744      "cost_per_1m_in_cached": 0.01,
 745      "cost_per_1m_out_cached": 0,
 746      "context_window": 400000,
 747      "default_max_tokens": 8000,
 748      "can_reason": true,
 749      "reasoning_levels": [
 750        "low",
 751        "medium",
 752        "high"
 753      ],
 754      "default_reasoning_effort": "medium",
 755      "supports_attachments": true,
 756      "options": {}
 757    },
 758    {
 759      "id": "openai/gpt-5-pro",
 760      "name": "GPT-5 pro",
 761      "cost_per_1m_in": 15,
 762      "cost_per_1m_out": 120,
 763      "cost_per_1m_in_cached": 0,
 764      "cost_per_1m_out_cached": 0,
 765      "context_window": 400000,
 766      "default_max_tokens": 8000,
 767      "can_reason": true,
 768      "reasoning_levels": [
 769        "low",
 770        "medium",
 771        "high"
 772      ],
 773      "default_reasoning_effort": "medium",
 774      "supports_attachments": true,
 775      "options": {}
 776    },
 777    {
 778      "id": "openai/gpt-5-codex",
 779      "name": "GPT-5-Codex",
 780      "cost_per_1m_in": 1.25,
 781      "cost_per_1m_out": 10,
 782      "cost_per_1m_in_cached": 0.13,
 783      "cost_per_1m_out_cached": 0,
 784      "context_window": 400000,
 785      "default_max_tokens": 8000,
 786      "can_reason": true,
 787      "reasoning_levels": [
 788        "low",
 789        "medium",
 790        "high"
 791      ],
 792      "default_reasoning_effort": "medium",
 793      "supports_attachments": true,
 794      "options": {}
 795    },
 796    {
 797      "id": "openai/gpt-5.1-codex-mini",
 798      "name": "GPT-5.1 Codex mini",
 799      "cost_per_1m_in": 0.25,
 800      "cost_per_1m_out": 2,
 801      "cost_per_1m_in_cached": 0.025,
 802      "cost_per_1m_out_cached": 0,
 803      "context_window": 400000,
 804      "default_max_tokens": 8000,
 805      "can_reason": true,
 806      "reasoning_levels": [
 807        "low",
 808        "medium",
 809        "high"
 810      ],
 811      "default_reasoning_effort": "medium",
 812      "supports_attachments": true,
 813      "options": {}
 814    },
 815    {
 816      "id": "openai/gpt-5.1-instant",
 817      "name": "GPT-5.1 Instant",
 818      "cost_per_1m_in": 1.25,
 819      "cost_per_1m_out": 10,
 820      "cost_per_1m_in_cached": 0.13,
 821      "cost_per_1m_out_cached": 0,
 822      "context_window": 128000,
 823      "default_max_tokens": 8000,
 824      "can_reason": true,
 825      "reasoning_levels": [
 826        "low",
 827        "medium",
 828        "high"
 829      ],
 830      "default_reasoning_effort": "medium",
 831      "supports_attachments": true,
 832      "options": {}
 833    },
 834    {
 835      "id": "openai/gpt-5.1-codex",
 836      "name": "GPT-5.1-Codex",
 837      "cost_per_1m_in": 1.25,
 838      "cost_per_1m_out": 10,
 839      "cost_per_1m_in_cached": 0.125,
 840      "cost_per_1m_out_cached": 0,
 841      "context_window": 400000,
 842      "default_max_tokens": 8000,
 843      "can_reason": true,
 844      "reasoning_levels": [
 845        "low",
 846        "medium",
 847        "high"
 848      ],
 849      "default_reasoning_effort": "medium",
 850      "supports_attachments": true,
 851      "options": {}
 852    },
 853    {
 854      "id": "openai/gpt-5.2-chat",
 855      "name": "GPT-5.2 Chat",
 856      "cost_per_1m_in": 1.75,
 857      "cost_per_1m_out": 14,
 858      "cost_per_1m_in_cached": 0.175,
 859      "cost_per_1m_out_cached": 0,
 860      "context_window": 128000,
 861      "default_max_tokens": 8000,
 862      "can_reason": true,
 863      "reasoning_levels": [
 864        "low",
 865        "medium",
 866        "high"
 867      ],
 868      "default_reasoning_effort": "medium",
 869      "supports_attachments": true,
 870      "options": {}
 871    },
 872    {
 873      "id": "openai/gpt-5.2-codex",
 874      "name": "GPT-5.2-Codex",
 875      "cost_per_1m_in": 1.75,
 876      "cost_per_1m_out": 14,
 877      "cost_per_1m_in_cached": 0.175,
 878      "cost_per_1m_out_cached": 0,
 879      "context_window": 400000,
 880      "default_max_tokens": 8000,
 881      "can_reason": true,
 882      "reasoning_levels": [
 883        "low",
 884        "medium",
 885        "high"
 886      ],
 887      "default_reasoning_effort": "medium",
 888      "supports_attachments": true,
 889      "options": {}
 890    },
 891    {
 892      "id": "google/gemini-2.5-flash",
 893      "name": "Gemini 2.5 Flash",
 894      "cost_per_1m_in": 0.3,
 895      "cost_per_1m_out": 2.5,
 896      "cost_per_1m_in_cached": 0,
 897      "cost_per_1m_out_cached": 0,
 898      "context_window": 1000000,
 899      "default_max_tokens": 8000,
 900      "can_reason": true,
 901      "reasoning_levels": [
 902        "low",
 903        "medium",
 904        "high"
 905      ],
 906      "default_reasoning_effort": "medium",
 907      "supports_attachments": false,
 908      "options": {}
 909    },
 910    {
 911      "id": "google/gemini-2.5-flash-lite",
 912      "name": "Gemini 2.5 Flash Lite",
 913      "cost_per_1m_in": 0.1,
 914      "cost_per_1m_out": 0.4,
 915      "cost_per_1m_in_cached": 0.01,
 916      "cost_per_1m_out_cached": 0,
 917      "context_window": 1048576,
 918      "default_max_tokens": 8000,
 919      "can_reason": true,
 920      "reasoning_levels": [
 921        "low",
 922        "medium",
 923        "high"
 924      ],
 925      "default_reasoning_effort": "medium",
 926      "supports_attachments": true,
 927      "options": {}
 928    },
 929    {
 930      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 931      "name": "Gemini 2.5 Flash Lite Preview 09-2025",
 932      "cost_per_1m_in": 0.1,
 933      "cost_per_1m_out": 0.4,
 934      "cost_per_1m_in_cached": 0.01,
 935      "cost_per_1m_out_cached": 0,
 936      "context_window": 1048576,
 937      "default_max_tokens": 8000,
 938      "can_reason": true,
 939      "reasoning_levels": [
 940        "low",
 941        "medium",
 942        "high"
 943      ],
 944      "default_reasoning_effort": "medium",
 945      "supports_attachments": true,
 946      "options": {}
 947    },
 948    {
 949      "id": "google/gemini-2.5-flash-preview-09-2025",
 950      "name": "Gemini 2.5 Flash Preview 09-2025",
 951      "cost_per_1m_in": 0.3,
 952      "cost_per_1m_out": 2.5,
 953      "cost_per_1m_in_cached": 0.03,
 954      "cost_per_1m_out_cached": 0,
 955      "context_window": 1000000,
 956      "default_max_tokens": 8000,
 957      "can_reason": true,
 958      "reasoning_levels": [
 959        "low",
 960        "medium",
 961        "high"
 962      ],
 963      "default_reasoning_effort": "medium",
 964      "supports_attachments": true,
 965      "options": {}
 966    },
 967    {
 968      "id": "google/gemini-2.5-pro",
 969      "name": "Gemini 2.5 Pro",
 970      "cost_per_1m_in": 1.25,
 971      "cost_per_1m_out": 10,
 972      "cost_per_1m_in_cached": 0,
 973      "cost_per_1m_out_cached": 0,
 974      "context_window": 1048576,
 975      "default_max_tokens": 8000,
 976      "can_reason": true,
 977      "reasoning_levels": [
 978        "low",
 979        "medium",
 980        "high"
 981      ],
 982      "default_reasoning_effort": "medium",
 983      "supports_attachments": false,
 984      "options": {}
 985    },
 986    {
 987      "id": "google/gemini-3-flash",
 988      "name": "Gemini 3 Flash",
 989      "cost_per_1m_in": 0.5,
 990      "cost_per_1m_out": 3,
 991      "cost_per_1m_in_cached": 0.05,
 992      "cost_per_1m_out_cached": 0,
 993      "context_window": 1000000,
 994      "default_max_tokens": 8000,
 995      "can_reason": true,
 996      "reasoning_levels": [
 997        "low",
 998        "medium",
 999        "high"
1000      ],
1001      "default_reasoning_effort": "medium",
1002      "supports_attachments": true,
1003      "options": {}
1004    },
1005    {
1006      "id": "google/gemini-3-pro-preview",
1007      "name": "Gemini 3 Pro Preview",
1008      "cost_per_1m_in": 2,
1009      "cost_per_1m_out": 12,
1010      "cost_per_1m_in_cached": 0.2,
1011      "cost_per_1m_out_cached": 0,
1012      "context_window": 1000000,
1013      "default_max_tokens": 8000,
1014      "can_reason": true,
1015      "reasoning_levels": [
1016        "low",
1017        "medium",
1018        "high"
1019      ],
1020      "default_reasoning_effort": "medium",
1021      "supports_attachments": true,
1022      "options": {}
1023    },
1024    {
1025      "id": "xai/grok-2-vision",
1026      "name": "Grok 2 Vision",
1027      "cost_per_1m_in": 2,
1028      "cost_per_1m_out": 10,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0,
1031      "context_window": 32768,
1032      "default_max_tokens": 8000,
1033      "can_reason": false,
1034      "supports_attachments": true,
1035      "options": {}
1036    },
1037    {
1038      "id": "xai/grok-3",
1039      "name": "Grok 3 Beta",
1040      "cost_per_1m_in": 3,
1041      "cost_per_1m_out": 15,
1042      "cost_per_1m_in_cached": 0,
1043      "cost_per_1m_out_cached": 0,
1044      "context_window": 131072,
1045      "default_max_tokens": 8000,
1046      "can_reason": false,
1047      "supports_attachments": false,
1048      "options": {}
1049    },
1050    {
1051      "id": "xai/grok-3-fast",
1052      "name": "Grok 3 Fast Beta",
1053      "cost_per_1m_in": 5,
1054      "cost_per_1m_out": 25,
1055      "cost_per_1m_in_cached": 0,
1056      "cost_per_1m_out_cached": 0,
1057      "context_window": 131072,
1058      "default_max_tokens": 8000,
1059      "can_reason": false,
1060      "supports_attachments": false,
1061      "options": {}
1062    },
1063    {
1064      "id": "xai/grok-3-mini",
1065      "name": "Grok 3 Mini Beta",
1066      "cost_per_1m_in": 0.3,
1067      "cost_per_1m_out": 0.5,
1068      "cost_per_1m_in_cached": 0,
1069      "cost_per_1m_out_cached": 0,
1070      "context_window": 131072,
1071      "default_max_tokens": 8000,
1072      "can_reason": false,
1073      "supports_attachments": false,
1074      "options": {}
1075    },
1076    {
1077      "id": "xai/grok-3-mini-fast",
1078      "name": "Grok 3 Mini Fast Beta",
1079      "cost_per_1m_in": 0.6,
1080      "cost_per_1m_out": 4,
1081      "cost_per_1m_in_cached": 0,
1082      "cost_per_1m_out_cached": 0,
1083      "context_window": 131072,
1084      "default_max_tokens": 8000,
1085      "can_reason": false,
1086      "supports_attachments": false,
1087      "options": {}
1088    },
1089    {
1090      "id": "xai/grok-4",
1091      "name": "Grok 4",
1092      "cost_per_1m_in": 3,
1093      "cost_per_1m_out": 15,
1094      "cost_per_1m_in_cached": 0,
1095      "cost_per_1m_out_cached": 0,
1096      "context_window": 256000,
1097      "default_max_tokens": 8000,
1098      "can_reason": true,
1099      "reasoning_levels": [
1100        "low",
1101        "medium",
1102        "high"
1103      ],
1104      "default_reasoning_effort": "medium",
1105      "supports_attachments": true,
1106      "options": {}
1107    },
1108    {
1109      "id": "xai/grok-4-fast-non-reasoning",
1110      "name": "Grok 4 Fast Non-Reasoning",
1111      "cost_per_1m_in": 0.2,
1112      "cost_per_1m_out": 0.5,
1113      "cost_per_1m_in_cached": 0.05,
1114      "cost_per_1m_out_cached": 0,
1115      "context_window": 2000000,
1116      "default_max_tokens": 8000,
1117      "can_reason": false,
1118      "supports_attachments": false,
1119      "options": {}
1120    },
1121    {
1122      "id": "xai/grok-4-fast-reasoning",
1123      "name": "Grok 4 Fast Reasoning",
1124      "cost_per_1m_in": 0.2,
1125      "cost_per_1m_out": 0.5,
1126      "cost_per_1m_in_cached": 0.05,
1127      "cost_per_1m_out_cached": 0,
1128      "context_window": 2000000,
1129      "default_max_tokens": 8000,
1130      "can_reason": true,
1131      "reasoning_levels": [
1132        "low",
1133        "medium",
1134        "high"
1135      ],
1136      "default_reasoning_effort": "medium",
1137      "supports_attachments": false,
1138      "options": {}
1139    },
1140    {
1141      "id": "xai/grok-4.1-fast-non-reasoning",
1142      "name": "Grok 4.1 Fast Non-Reasoning",
1143      "cost_per_1m_in": 0.2,
1144      "cost_per_1m_out": 0.5,
1145      "cost_per_1m_in_cached": 0.05,
1146      "cost_per_1m_out_cached": 0,
1147      "context_window": 2000000,
1148      "default_max_tokens": 8000,
1149      "can_reason": false,
1150      "supports_attachments": false,
1151      "options": {}
1152    },
1153    {
1154      "id": "xai/grok-4.1-fast-reasoning",
1155      "name": "Grok 4.1 Fast Reasoning",
1156      "cost_per_1m_in": 0.2,
1157      "cost_per_1m_out": 0.5,
1158      "cost_per_1m_in_cached": 0.05,
1159      "cost_per_1m_out_cached": 0,
1160      "context_window": 2000000,
1161      "default_max_tokens": 8000,
1162      "can_reason": true,
1163      "reasoning_levels": [
1164        "low",
1165        "medium",
1166        "high"
1167      ],
1168      "default_reasoning_effort": "medium",
1169      "supports_attachments": false,
1170      "options": {}
1171    },
1172    {
1173      "id": "xai/grok-code-fast-1",
1174      "name": "Grok Code Fast 1",
1175      "cost_per_1m_in": 0.2,
1176      "cost_per_1m_out": 1.5,
1177      "cost_per_1m_in_cached": 0.02,
1178      "cost_per_1m_out_cached": 0,
1179      "context_window": 256000,
1180      "default_max_tokens": 8000,
1181      "can_reason": true,
1182      "reasoning_levels": [
1183        "low",
1184        "medium",
1185        "high"
1186      ],
1187      "default_reasoning_effort": "medium",
1188      "supports_attachments": false,
1189      "options": {}
1190    },
1191    {
1192      "id": "prime-intellect/intellect-3",
1193      "name": "INTELLECT 3",
1194      "cost_per_1m_in": 0.2,
1195      "cost_per_1m_out": 1.1,
1196      "cost_per_1m_in_cached": 0,
1197      "cost_per_1m_out_cached": 0,
1198      "context_window": 131072,
1199      "default_max_tokens": 8000,
1200      "can_reason": true,
1201      "reasoning_levels": [
1202        "low",
1203        "medium",
1204        "high"
1205      ],
1206      "default_reasoning_effort": "medium",
1207      "supports_attachments": false,
1208      "options": {}
1209    },
1210    {
1211      "id": "moonshotai/kimi-k2",
1212      "name": "Kimi K2",
1213      "cost_per_1m_in": 0.5,
1214      "cost_per_1m_out": 2,
1215      "cost_per_1m_in_cached": 0,
1216      "cost_per_1m_out_cached": 0,
1217      "context_window": 131072,
1218      "default_max_tokens": 8000,
1219      "can_reason": false,
1220      "supports_attachments": false,
1221      "options": {}
1222    },
1223    {
1224      "id": "moonshotai/kimi-k2-thinking",
1225      "name": "Kimi K2 Thinking",
1226      "cost_per_1m_in": 0.47,
1227      "cost_per_1m_out": 2,
1228      "cost_per_1m_in_cached": 0.141,
1229      "cost_per_1m_out_cached": 0,
1230      "context_window": 216144,
1231      "default_max_tokens": 8000,
1232      "can_reason": true,
1233      "reasoning_levels": [
1234        "low",
1235        "medium",
1236        "high"
1237      ],
1238      "default_reasoning_effort": "medium",
1239      "supports_attachments": false,
1240      "options": {}
1241    },
1242    {
1243      "id": "moonshotai/kimi-k2-thinking-turbo",
1244      "name": "Kimi K2 Thinking Turbo",
1245      "cost_per_1m_in": 1.15,
1246      "cost_per_1m_out": 8,
1247      "cost_per_1m_in_cached": 0.15,
1248      "cost_per_1m_out_cached": 0,
1249      "context_window": 262114,
1250      "default_max_tokens": 8000,
1251      "can_reason": true,
1252      "reasoning_levels": [
1253        "low",
1254        "medium",
1255        "high"
1256      ],
1257      "default_reasoning_effort": "medium",
1258      "supports_attachments": false,
1259      "options": {}
1260    },
1261    {
1262      "id": "moonshotai/kimi-k2-turbo",
1263      "name": "Kimi K2 Turbo",
1264      "cost_per_1m_in": 2.4,
1265      "cost_per_1m_out": 10,
1266      "cost_per_1m_in_cached": 0,
1267      "cost_per_1m_out_cached": 0,
1268      "context_window": 256000,
1269      "default_max_tokens": 8000,
1270      "can_reason": false,
1271      "supports_attachments": false,
1272      "options": {}
1273    },
1274    {
1275      "id": "moonshotai/kimi-k2.5",
1276      "name": "Kimi K2.5",
1277      "cost_per_1m_in": 0.6,
1278      "cost_per_1m_out": 3,
1279      "cost_per_1m_in_cached": 0.1,
1280      "cost_per_1m_out_cached": 0,
1281      "context_window": 256000,
1282      "default_max_tokens": 8000,
1283      "can_reason": true,
1284      "reasoning_levels": [
1285        "low",
1286        "medium",
1287        "high"
1288      ],
1289      "default_reasoning_effort": "medium",
1290      "supports_attachments": true,
1291      "options": {}
1292    },
1293    {
1294      "id": "meta/llama-3.1-70b",
1295      "name": "Llama 3.1 70B Instruct",
1296      "cost_per_1m_in": 0.4,
1297      "cost_per_1m_out": 0.4,
1298      "cost_per_1m_in_cached": 0,
1299      "cost_per_1m_out_cached": 0,
1300      "context_window": 131072,
1301      "default_max_tokens": 8000,
1302      "can_reason": false,
1303      "supports_attachments": false,
1304      "options": {}
1305    },
1306    {
1307      "id": "meta/llama-3.1-8b",
1308      "name": "Llama 3.1 8B Instruct",
1309      "cost_per_1m_in": 0.03,
1310      "cost_per_1m_out": 0.05,
1311      "cost_per_1m_in_cached": 0,
1312      "cost_per_1m_out_cached": 0,
1313      "context_window": 131072,
1314      "default_max_tokens": 8000,
1315      "can_reason": false,
1316      "supports_attachments": false,
1317      "options": {}
1318    },
1319    {
1320      "id": "meta/llama-3.2-11b",
1321      "name": "Llama 3.2 11B Vision Instruct",
1322      "cost_per_1m_in": 0.16,
1323      "cost_per_1m_out": 0.16,
1324      "cost_per_1m_in_cached": 0,
1325      "cost_per_1m_out_cached": 0,
1326      "context_window": 128000,
1327      "default_max_tokens": 8000,
1328      "can_reason": false,
1329      "supports_attachments": true,
1330      "options": {}
1331    },
1332    {
1333      "id": "meta/llama-3.2-90b",
1334      "name": "Llama 3.2 90B Vision Instruct",
1335      "cost_per_1m_in": 0.72,
1336      "cost_per_1m_out": 0.72,
1337      "cost_per_1m_in_cached": 0,
1338      "cost_per_1m_out_cached": 0,
1339      "context_window": 128000,
1340      "default_max_tokens": 8000,
1341      "can_reason": false,
1342      "supports_attachments": true,
1343      "options": {}
1344    },
1345    {
1346      "id": "meta/llama-3.3-70b",
1347      "name": "Llama 3.3 70B Instruct",
1348      "cost_per_1m_in": 0.72,
1349      "cost_per_1m_out": 0.72,
1350      "cost_per_1m_in_cached": 0,
1351      "cost_per_1m_out_cached": 0,
1352      "context_window": 128000,
1353      "default_max_tokens": 8000,
1354      "can_reason": false,
1355      "supports_attachments": false,
1356      "options": {}
1357    },
1358    {
1359      "id": "meta/llama-4-maverick",
1360      "name": "Llama 4 Maverick 17B Instruct",
1361      "cost_per_1m_in": 0.15,
1362      "cost_per_1m_out": 0.6,
1363      "cost_per_1m_in_cached": 0,
1364      "cost_per_1m_out_cached": 0,
1365      "context_window": 131072,
1366      "default_max_tokens": 8000,
1367      "can_reason": false,
1368      "supports_attachments": true,
1369      "options": {}
1370    },
1371    {
1372      "id": "meta/llama-4-scout",
1373      "name": "Llama 4 Scout 17B Instruct",
1374      "cost_per_1m_in": 0.08,
1375      "cost_per_1m_out": 0.3,
1376      "cost_per_1m_in_cached": 0,
1377      "cost_per_1m_out_cached": 0,
1378      "context_window": 131072,
1379      "default_max_tokens": 8000,
1380      "can_reason": false,
1381      "supports_attachments": true,
1382      "options": {}
1383    },
1384    {
1385      "id": "meituan/longcat-flash-chat",
1386      "name": "LongCat Flash Chat",
1387      "cost_per_1m_in": 0,
1388      "cost_per_1m_out": 0,
1389      "cost_per_1m_in_cached": 0,
1390      "cost_per_1m_out_cached": 0,
1391      "context_window": 128000,
1392      "default_max_tokens": 8000,
1393      "can_reason": false,
1394      "supports_attachments": false,
1395      "options": {}
1396    },
1397    {
1398      "id": "meituan/longcat-flash-thinking",
1399      "name": "LongCat Flash Thinking",
1400      "cost_per_1m_in": 0.15,
1401      "cost_per_1m_out": 1.5,
1402      "cost_per_1m_in_cached": 0,
1403      "cost_per_1m_out_cached": 0,
1404      "context_window": 128000,
1405      "default_max_tokens": 8000,
1406      "can_reason": true,
1407      "reasoning_levels": [
1408        "low",
1409        "medium",
1410        "high"
1411      ],
1412      "default_reasoning_effort": "medium",
1413      "supports_attachments": false,
1414      "options": {}
1415    },
1416    {
1417      "id": "inception/mercury-coder-small",
1418      "name": "Mercury Coder Small Beta",
1419      "cost_per_1m_in": 0.25,
1420      "cost_per_1m_out": 1,
1421      "cost_per_1m_in_cached": 0,
1422      "cost_per_1m_out_cached": 0,
1423      "context_window": 32000,
1424      "default_max_tokens": 8000,
1425      "can_reason": false,
1426      "supports_attachments": false,
1427      "options": {}
1428    },
1429    {
1430      "id": "xiaomi/mimo-v2-flash",
1431      "name": "MiMo V2 Flash",
1432      "cost_per_1m_in": 0.09,
1433      "cost_per_1m_out": 0.29,
1434      "cost_per_1m_in_cached": 0,
1435      "cost_per_1m_out_cached": 0,
1436      "context_window": 262144,
1437      "default_max_tokens": 8000,
1438      "can_reason": true,
1439      "reasoning_levels": [
1440        "low",
1441        "medium",
1442        "high"
1443      ],
1444      "default_reasoning_effort": "medium",
1445      "supports_attachments": false,
1446      "options": {}
1447    },
1448    {
1449      "id": "minimax/minimax-m2",
1450      "name": "MiniMax M2",
1451      "cost_per_1m_in": 0.27,
1452      "cost_per_1m_out": 1.15,
1453      "cost_per_1m_in_cached": 0,
1454      "cost_per_1m_out_cached": 0,
1455      "context_window": 262114,
1456      "default_max_tokens": 8000,
1457      "can_reason": true,
1458      "reasoning_levels": [
1459        "low",
1460        "medium",
1461        "high"
1462      ],
1463      "default_reasoning_effort": "medium",
1464      "supports_attachments": false,
1465      "options": {}
1466    },
1467    {
1468      "id": "minimax/minimax-m2.1",
1469      "name": "MiniMax M2.1",
1470      "cost_per_1m_in": 0.28,
1471      "cost_per_1m_out": 1.2,
1472      "cost_per_1m_in_cached": 0.14,
1473      "cost_per_1m_out_cached": 0,
1474      "context_window": 196608,
1475      "default_max_tokens": 8000,
1476      "can_reason": true,
1477      "reasoning_levels": [
1478        "low",
1479        "medium",
1480        "high"
1481      ],
1482      "default_reasoning_effort": "medium",
1483      "supports_attachments": false,
1484      "options": {}
1485    },
1486    {
1487      "id": "minimax/minimax-m2.1-lightning",
1488      "name": "MiniMax M2.1 Lightning",
1489      "cost_per_1m_in": 0.3,
1490      "cost_per_1m_out": 2.4,
1491      "cost_per_1m_in_cached": 0.03,
1492      "cost_per_1m_out_cached": 0.375,
1493      "context_window": 204800,
1494      "default_max_tokens": 8000,
1495      "can_reason": true,
1496      "reasoning_levels": [
1497        "low",
1498        "medium",
1499        "high"
1500      ],
1501      "default_reasoning_effort": "medium",
1502      "supports_attachments": false,
1503      "options": {}
1504    },
1505    {
1506      "id": "mistral/ministral-3b",
1507      "name": "Ministral 3B",
1508      "cost_per_1m_in": 0.04,
1509      "cost_per_1m_out": 0.04,
1510      "cost_per_1m_in_cached": 0,
1511      "cost_per_1m_out_cached": 0,
1512      "context_window": 128000,
1513      "default_max_tokens": 4000,
1514      "can_reason": false,
1515      "supports_attachments": false,
1516      "options": {}
1517    },
1518    {
1519      "id": "mistral/ministral-8b",
1520      "name": "Ministral 8B",
1521      "cost_per_1m_in": 0.1,
1522      "cost_per_1m_out": 0.1,
1523      "cost_per_1m_in_cached": 0,
1524      "cost_per_1m_out_cached": 0,
1525      "context_window": 128000,
1526      "default_max_tokens": 4000,
1527      "can_reason": false,
1528      "supports_attachments": false,
1529      "options": {}
1530    },
1531    {
1532      "id": "mistral/codestral",
1533      "name": "Mistral Codestral",
1534      "cost_per_1m_in": 0.3,
1535      "cost_per_1m_out": 0.9,
1536      "cost_per_1m_in_cached": 0,
1537      "cost_per_1m_out_cached": 0,
1538      "context_window": 128000,
1539      "default_max_tokens": 4000,
1540      "can_reason": false,
1541      "supports_attachments": false,
1542      "options": {}
1543    },
1544    {
1545      "id": "mistral/mistral-medium",
1546      "name": "Mistral Medium 3.1",
1547      "cost_per_1m_in": 0.4,
1548      "cost_per_1m_out": 2,
1549      "cost_per_1m_in_cached": 0,
1550      "cost_per_1m_out_cached": 0,
1551      "context_window": 128000,
1552      "default_max_tokens": 8000,
1553      "can_reason": false,
1554      "supports_attachments": true,
1555      "options": {}
1556    },
1557    {
1558      "id": "mistral/mistral-small",
1559      "name": "Mistral Small",
1560      "cost_per_1m_in": 0.1,
1561      "cost_per_1m_out": 0.3,
1562      "cost_per_1m_in_cached": 0,
1563      "cost_per_1m_out_cached": 0,
1564      "context_window": 32000,
1565      "default_max_tokens": 4000,
1566      "can_reason": false,
1567      "supports_attachments": true,
1568      "options": {}
1569    },
1570    {
1571      "id": "nvidia/nemotron-nano-12b-v2-vl",
1572      "name": "Nvidia Nemotron Nano 12B V2 VL",
1573      "cost_per_1m_in": 0.2,
1574      "cost_per_1m_out": 0.6,
1575      "cost_per_1m_in_cached": 0,
1576      "cost_per_1m_out_cached": 0,
1577      "context_window": 131072,
1578      "default_max_tokens": 8000,
1579      "can_reason": true,
1580      "reasoning_levels": [
1581        "low",
1582        "medium",
1583        "high"
1584      ],
1585      "default_reasoning_effort": "medium",
1586      "supports_attachments": true,
1587      "options": {}
1588    },
1589    {
1590      "id": "nvidia/nemotron-nano-9b-v2",
1591      "name": "Nvidia Nemotron Nano 9B V2",
1592      "cost_per_1m_in": 0.04,
1593      "cost_per_1m_out": 0.16,
1594      "cost_per_1m_in_cached": 0,
1595      "cost_per_1m_out_cached": 0,
1596      "context_window": 131072,
1597      "default_max_tokens": 8000,
1598      "can_reason": true,
1599      "reasoning_levels": [
1600        "low",
1601        "medium",
1602        "high"
1603      ],
1604      "default_reasoning_effort": "medium",
1605      "supports_attachments": false,
1606      "options": {}
1607    },
1608    {
1609      "id": "mistral/pixtral-12b",
1610      "name": "Pixtral 12B 2409",
1611      "cost_per_1m_in": 0.15,
1612      "cost_per_1m_out": 0.15,
1613      "cost_per_1m_in_cached": 0,
1614      "cost_per_1m_out_cached": 0,
1615      "context_window": 128000,
1616      "default_max_tokens": 4000,
1617      "can_reason": false,
1618      "supports_attachments": true,
1619      "options": {}
1620    },
1621    {
1622      "id": "mistral/pixtral-large",
1623      "name": "Pixtral Large",
1624      "cost_per_1m_in": 2,
1625      "cost_per_1m_out": 6,
1626      "cost_per_1m_in_cached": 0,
1627      "cost_per_1m_out_cached": 0,
1628      "context_window": 128000,
1629      "default_max_tokens": 4000,
1630      "can_reason": false,
1631      "supports_attachments": true,
1632      "options": {}
1633    },
1634    {
1635      "id": "alibaba/qwen3-coder-30b-a3b",
1636      "name": "Qwen 3 Coder 30B A3B Instruct",
1637      "cost_per_1m_in": 0.07,
1638      "cost_per_1m_out": 0.27,
1639      "cost_per_1m_in_cached": 0,
1640      "cost_per_1m_out_cached": 0,
1641      "context_window": 160000,
1642      "default_max_tokens": 8000,
1643      "can_reason": true,
1644      "reasoning_levels": [
1645        "low",
1646        "medium",
1647        "high"
1648      ],
1649      "default_reasoning_effort": "medium",
1650      "supports_attachments": false,
1651      "options": {}
1652    },
1653    {
1654      "id": "alibaba/qwen3-max-thinking",
1655      "name": "Qwen 3 Max Thinking",
1656      "cost_per_1m_in": 1.2,
1657      "cost_per_1m_out": 6,
1658      "cost_per_1m_in_cached": 0.24,
1659      "cost_per_1m_out_cached": 0,
1660      "context_window": 256000,
1661      "default_max_tokens": 8000,
1662      "can_reason": true,
1663      "reasoning_levels": [
1664        "low",
1665        "medium",
1666        "high"
1667      ],
1668      "default_reasoning_effort": "medium",
1669      "supports_attachments": false,
1670      "options": {}
1671    },
1672    {
1673      "id": "alibaba/qwen-3-32b",
1674      "name": "Qwen 3 32B",
1675      "cost_per_1m_in": 0.1,
1676      "cost_per_1m_out": 0.3,
1677      "cost_per_1m_in_cached": 0,
1678      "cost_per_1m_out_cached": 0,
1679      "context_window": 40960,
1680      "default_max_tokens": 8000,
1681      "can_reason": true,
1682      "reasoning_levels": [
1683        "low",
1684        "medium",
1685        "high"
1686      ],
1687      "default_reasoning_effort": "medium",
1688      "supports_attachments": false,
1689      "options": {}
1690    },
1691    {
1692      "id": "alibaba/qwen3-235b-a22b-thinking",
1693      "name": "Qwen3 235B A22B Thinking 2507",
1694      "cost_per_1m_in": 0.3,
1695      "cost_per_1m_out": 2.9,
1696      "cost_per_1m_in_cached": 0,
1697      "cost_per_1m_out_cached": 0,
1698      "context_window": 262114,
1699      "default_max_tokens": 8000,
1700      "can_reason": true,
1701      "reasoning_levels": [
1702        "low",
1703        "medium",
1704        "high"
1705      ],
1706      "default_reasoning_effort": "medium",
1707      "supports_attachments": true,
1708      "options": {}
1709    },
1710    {
1711      "id": "alibaba/qwen-3-235b",
1712      "name": "Qwen3 235B A22b Instruct 2507",
1713      "cost_per_1m_in": 0.071,
1714      "cost_per_1m_out": 0.463,
1715      "cost_per_1m_in_cached": 0,
1716      "cost_per_1m_out_cached": 0,
1717      "context_window": 40960,
1718      "default_max_tokens": 8000,
1719      "can_reason": false,
1720      "supports_attachments": false,
1721      "options": {}
1722    },
1723    {
1724      "id": "alibaba/qwen3-coder",
1725      "name": "Qwen3 Coder 480B A35B Instruct",
1726      "cost_per_1m_in": 0.38,
1727      "cost_per_1m_out": 1.53,
1728      "cost_per_1m_in_cached": 0,
1729      "cost_per_1m_out_cached": 0,
1730      "context_window": 262144,
1731      "default_max_tokens": 8000,
1732      "can_reason": false,
1733      "supports_attachments": false,
1734      "options": {}
1735    },
1736    {
1737      "id": "alibaba/qwen3-coder-plus",
1738      "name": "Qwen3 Coder Plus",
1739      "cost_per_1m_in": 1,
1740      "cost_per_1m_out": 5,
1741      "cost_per_1m_in_cached": 0.2,
1742      "cost_per_1m_out_cached": 0,
1743      "context_window": 1000000,
1744      "default_max_tokens": 8000,
1745      "can_reason": false,
1746      "supports_attachments": false,
1747      "options": {}
1748    },
1749    {
1750      "id": "alibaba/qwen3-max-preview",
1751      "name": "Qwen3 Max Preview",
1752      "cost_per_1m_in": 1.2,
1753      "cost_per_1m_out": 6,
1754      "cost_per_1m_in_cached": 0.24,
1755      "cost_per_1m_out_cached": 0,
1756      "context_window": 262144,
1757      "default_max_tokens": 8000,
1758      "can_reason": false,
1759      "supports_attachments": false,
1760      "options": {}
1761    },
1762    {
1763      "id": "alibaba/qwen-3-14b",
1764      "name": "Qwen3-14B",
1765      "cost_per_1m_in": 0.06,
1766      "cost_per_1m_out": 0.24,
1767      "cost_per_1m_in_cached": 0,
1768      "cost_per_1m_out_cached": 0,
1769      "context_window": 40960,
1770      "default_max_tokens": 8000,
1771      "can_reason": true,
1772      "reasoning_levels": [
1773        "low",
1774        "medium",
1775        "high"
1776      ],
1777      "default_reasoning_effort": "medium",
1778      "supports_attachments": false,
1779      "options": {}
1780    },
1781    {
1782      "id": "alibaba/qwen-3-30b",
1783      "name": "Qwen3-30B-A3B",
1784      "cost_per_1m_in": 0.08,
1785      "cost_per_1m_out": 0.29,
1786      "cost_per_1m_in_cached": 0,
1787      "cost_per_1m_out_cached": 0,
1788      "context_window": 40960,
1789      "default_max_tokens": 8000,
1790      "can_reason": true,
1791      "reasoning_levels": [
1792        "low",
1793        "medium",
1794        "high"
1795      ],
1796      "default_reasoning_effort": "medium",
1797      "supports_attachments": false,
1798      "options": {}
1799    },
1800    {
1801      "id": "bytedance/seed-1.6",
1802      "name": "Seed 1.6",
1803      "cost_per_1m_in": 0.25,
1804      "cost_per_1m_out": 2,
1805      "cost_per_1m_in_cached": 0.05,
1806      "cost_per_1m_out_cached": 0,
1807      "context_window": 256000,
1808      "default_max_tokens": 8000,
1809      "can_reason": true,
1810      "reasoning_levels": [
1811        "low",
1812        "medium",
1813        "high"
1814      ],
1815      "default_reasoning_effort": "medium",
1816      "supports_attachments": false,
1817      "options": {}
1818    },
1819    {
1820      "id": "perplexity/sonar",
1821      "name": "Sonar",
1822      "cost_per_1m_in": 1,
1823      "cost_per_1m_out": 1,
1824      "cost_per_1m_in_cached": 0,
1825      "cost_per_1m_out_cached": 0,
1826      "context_window": 127000,
1827      "default_max_tokens": 8000,
1828      "can_reason": false,
1829      "supports_attachments": true,
1830      "options": {}
1831    },
1832    {
1833      "id": "perplexity/sonar-pro",
1834      "name": "Sonar Pro",
1835      "cost_per_1m_in": 3,
1836      "cost_per_1m_out": 15,
1837      "cost_per_1m_in_cached": 0,
1838      "cost_per_1m_out_cached": 0,
1839      "context_window": 200000,
1840      "default_max_tokens": 8000,
1841      "can_reason": false,
1842      "supports_attachments": true,
1843      "options": {}
1844    },
1845    {
1846      "id": "arcee-ai/trinity-large-preview",
1847      "name": "Trinity Large Preview",
1848      "cost_per_1m_in": 0.25,
1849      "cost_per_1m_out": 1,
1850      "cost_per_1m_in_cached": 0,
1851      "cost_per_1m_out_cached": 0,
1852      "context_window": 131000,
1853      "default_max_tokens": 8000,
1854      "can_reason": false,
1855      "supports_attachments": false,
1856      "options": {}
1857    },
1858    {
1859      "id": "openai/gpt-oss-120b",
1860      "name": "gpt-oss-120b",
1861      "cost_per_1m_in": 0.1,
1862      "cost_per_1m_out": 0.5,
1863      "cost_per_1m_in_cached": 0,
1864      "cost_per_1m_out_cached": 0,
1865      "context_window": 131072,
1866      "default_max_tokens": 8000,
1867      "can_reason": true,
1868      "reasoning_levels": [
1869        "low",
1870        "medium",
1871        "high"
1872      ],
1873      "default_reasoning_effort": "medium",
1874      "supports_attachments": false,
1875      "options": {}
1876    },
1877    {
1878      "id": "openai/gpt-oss-20b",
1879      "name": "gpt-oss-20b",
1880      "cost_per_1m_in": 0.07,
1881      "cost_per_1m_out": 0.3,
1882      "cost_per_1m_in_cached": 0,
1883      "cost_per_1m_out_cached": 0,
1884      "context_window": 128000,
1885      "default_max_tokens": 8000,
1886      "can_reason": true,
1887      "reasoning_levels": [
1888        "low",
1889        "medium",
1890        "high"
1891      ],
1892      "default_reasoning_effort": "medium",
1893      "supports_attachments": false,
1894      "options": {}
1895    },
1896    {
1897      "id": "openai/gpt-oss-safeguard-20b",
1898      "name": "gpt-oss-safeguard-20b",
1899      "cost_per_1m_in": 0.075,
1900      "cost_per_1m_out": 0.3,
1901      "cost_per_1m_in_cached": 0.037,
1902      "cost_per_1m_out_cached": 0,
1903      "context_window": 131072,
1904      "default_max_tokens": 8000,
1905      "can_reason": true,
1906      "reasoning_levels": [
1907        "low",
1908        "medium",
1909        "high"
1910      ],
1911      "default_reasoning_effort": "medium",
1912      "supports_attachments": false,
1913      "options": {}
1914    },
1915    {
1916      "id": "openai/o1",
1917      "name": "o1",
1918      "cost_per_1m_in": 15,
1919      "cost_per_1m_out": 60,
1920      "cost_per_1m_in_cached": 7.5,
1921      "cost_per_1m_out_cached": 0,
1922      "context_window": 200000,
1923      "default_max_tokens": 8000,
1924      "can_reason": true,
1925      "reasoning_levels": [
1926        "low",
1927        "medium",
1928        "high"
1929      ],
1930      "default_reasoning_effort": "medium",
1931      "supports_attachments": true,
1932      "options": {}
1933    },
1934    {
1935      "id": "openai/o3",
1936      "name": "o3",
1937      "cost_per_1m_in": 2,
1938      "cost_per_1m_out": 8,
1939      "cost_per_1m_in_cached": 0.5,
1940      "cost_per_1m_out_cached": 0,
1941      "context_window": 200000,
1942      "default_max_tokens": 8000,
1943      "can_reason": true,
1944      "reasoning_levels": [
1945        "low",
1946        "medium",
1947        "high"
1948      ],
1949      "default_reasoning_effort": "medium",
1950      "supports_attachments": true,
1951      "options": {}
1952    },
1953    {
1954      "id": "openai/o3-pro",
1955      "name": "o3 Pro",
1956      "cost_per_1m_in": 20,
1957      "cost_per_1m_out": 80,
1958      "cost_per_1m_in_cached": 0,
1959      "cost_per_1m_out_cached": 0,
1960      "context_window": 200000,
1961      "default_max_tokens": 8000,
1962      "can_reason": true,
1963      "reasoning_levels": [
1964        "low",
1965        "medium",
1966        "high"
1967      ],
1968      "default_reasoning_effort": "medium",
1969      "supports_attachments": true,
1970      "options": {}
1971    },
1972    {
1973      "id": "openai/o3-deep-research",
1974      "name": "o3-deep-research",
1975      "cost_per_1m_in": 10,
1976      "cost_per_1m_out": 40,
1977      "cost_per_1m_in_cached": 2.5,
1978      "cost_per_1m_out_cached": 0,
1979      "context_window": 200000,
1980      "default_max_tokens": 8000,
1981      "can_reason": true,
1982      "reasoning_levels": [
1983        "low",
1984        "medium",
1985        "high"
1986      ],
1987      "default_reasoning_effort": "medium",
1988      "supports_attachments": true,
1989      "options": {}
1990    },
1991    {
1992      "id": "openai/o3-mini",
1993      "name": "o3-mini",
1994      "cost_per_1m_in": 1.1,
1995      "cost_per_1m_out": 4.4,
1996      "cost_per_1m_in_cached": 0.55,
1997      "cost_per_1m_out_cached": 0,
1998      "context_window": 200000,
1999      "default_max_tokens": 8000,
2000      "can_reason": true,
2001      "reasoning_levels": [
2002        "low",
2003        "medium",
2004        "high"
2005      ],
2006      "default_reasoning_effort": "medium",
2007      "supports_attachments": false,
2008      "options": {}
2009    },
2010    {
2011      "id": "openai/o4-mini",
2012      "name": "o4-mini",
2013      "cost_per_1m_in": 1.1,
2014      "cost_per_1m_out": 4.4,
2015      "cost_per_1m_in_cached": 0.275,
2016      "cost_per_1m_out_cached": 0,
2017      "context_window": 200000,
2018      "default_max_tokens": 8000,
2019      "can_reason": true,
2020      "reasoning_levels": [
2021        "low",
2022        "medium",
2023        "high"
2024      ],
2025      "default_reasoning_effort": "medium",
2026      "supports_attachments": true,
2027      "options": {}
2028    },
2029    {
2030      "id": "vercel/v0-1.0-md",
2031      "name": "v0-1.0-md",
2032      "cost_per_1m_in": 3,
2033      "cost_per_1m_out": 15,
2034      "cost_per_1m_in_cached": 0,
2035      "cost_per_1m_out_cached": 0,
2036      "context_window": 128000,
2037      "default_max_tokens": 8000,
2038      "can_reason": false,
2039      "supports_attachments": true,
2040      "options": {}
2041    },
2042    {
2043      "id": "vercel/v0-1.5-md",
2044      "name": "v0-1.5-md",
2045      "cost_per_1m_in": 3,
2046      "cost_per_1m_out": 15,
2047      "cost_per_1m_in_cached": 0,
2048      "cost_per_1m_out_cached": 0,
2049      "context_window": 128000,
2050      "default_max_tokens": 8000,
2051      "can_reason": false,
2052      "supports_attachments": true,
2053      "options": {}
2054    }
2055  ],
2056  "default_headers": {
2057    "HTTP-Referer": "https://charm.land",
2058    "X-Title": "Crush"
2059  }
2060}
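
The catalog above is plain JSON, so any client can consume it directly. The following is a minimal, illustrative Go sketch, not Crush's actual implementation; the struct and field names are assumptions chosen to mirror the keys above (only a subset of fields is modeled). It reads vercel.json, resolves the "$VERCEL_API_KEY" placeholder from the environment, and looks up the configured default large model.

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"
	"strings"
)

// Model mirrors one entry of the "models" array above (subset of fields).
type Model struct {
	ID                  string   `json:"id"`
	Name                string   `json:"name"`
	CostPer1MIn         float64  `json:"cost_per_1m_in"`
	CostPer1MOut        float64  `json:"cost_per_1m_out"`
	ContextWindow       int64    `json:"context_window"`
	DefaultMaxTokens    int64    `json:"default_max_tokens"`
	CanReason           bool     `json:"can_reason"`
	ReasoningLevels     []string `json:"reasoning_levels,omitempty"`
	SupportsAttachments bool     `json:"supports_attachments"`
}

// Provider mirrors the top-level object of vercel.json.
type Provider struct {
	Name                string            `json:"name"`
	ID                  string            `json:"id"`
	APIKey              string            `json:"api_key"`
	APIEndpoint         string            `json:"api_endpoint"`
	Type                string            `json:"type"`
	DefaultLargeModelID string            `json:"default_large_model_id"`
	DefaultSmallModelID string            `json:"default_small_model_id"`
	Models              []Model           `json:"models"`
	DefaultHeaders      map[string]string `json:"default_headers"`
}

func main() {
	raw, err := os.ReadFile("vercel.json")
	if err != nil {
		log.Fatal(err)
	}

	var p Provider
	if err := json.Unmarshal(raw, &p); err != nil {
		log.Fatal(err)
	}

	// "api_key" holds a "$VAR" placeholder; resolve it from the environment.
	apiKey := p.APIKey
	if strings.HasPrefix(apiKey, "$") {
		apiKey = os.Getenv(strings.TrimPrefix(apiKey, "$"))
	}
	_ = apiKey // would be sent as a bearer token to p.APIEndpoint

	// Print the configured default large model and its pricing.
	for _, m := range p.Models {
		if m.ID == p.DefaultLargeModelID {
			fmt.Printf("%s (%s): %d-token context, $%.2f/M in, $%.2f/M out\n",
				m.Name, m.ID, m.ContextWindow, m.CostPer1MIn, m.CostPer1MOut)
		}
	}
}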