vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "openai-compat",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true,
  21      "options": {}
  22    },
  23    {
  24      "id": "anthropic/claude-3.5-haiku",
  25      "name": "Claude 3.5 Haiku",
  26      "cost_per_1m_in": 0.8,
  27      "cost_per_1m_out": 4,
  28      "cost_per_1m_in_cached": 0.08,
  29      "cost_per_1m_out_cached": 1,
  30      "context_window": 200000,
  31      "default_max_tokens": 8000,
  32      "can_reason": false,
  33      "supports_attachments": true,
  34      "options": {}
  35    },
  36    {
  37      "id": "anthropic/claude-3.5-sonnet",
  38      "name": "Claude 3.5 Sonnet",
  39      "cost_per_1m_in": 3,
  40      "cost_per_1m_out": 15,
  41      "cost_per_1m_in_cached": 0.3,
  42      "cost_per_1m_out_cached": 3.75,
  43      "context_window": 200000,
  44      "default_max_tokens": 8000,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "anthropic/claude-3.5-sonnet-20240620",
  51      "name": "Claude 3.5 Sonnet (2024-06-20)",
  52      "cost_per_1m_in": 3,
  53      "cost_per_1m_out": 15,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 200000,
  57      "default_max_tokens": 8000,
  58      "can_reason": false,
  59      "supports_attachments": true,
  60      "options": {}
  61    },
  62    {
  63      "id": "anthropic/claude-3.7-sonnet",
  64      "name": "Claude 3.7 Sonnet",
  65      "cost_per_1m_in": 3,
  66      "cost_per_1m_out": 15,
  67      "cost_per_1m_in_cached": 0.3,
  68      "cost_per_1m_out_cached": 3.75,
  69      "context_window": 200000,
  70      "default_max_tokens": 8000,
  71      "can_reason": true,
  72      "reasoning_levels": [
  73        "low",
  74        "medium",
  75        "high"
  76      ],
  77      "default_reasoning_effort": "medium",
  78      "supports_attachments": true,
  79      "options": {}
  80    },
  81    {
  82      "id": "anthropic/claude-haiku-4.5",
  83      "name": "Claude Haiku 4.5",
  84      "cost_per_1m_in": 1,
  85      "cost_per_1m_out": 5,
  86      "cost_per_1m_in_cached": 0.1,
  87      "cost_per_1m_out_cached": 1.25,
  88      "context_window": 200000,
  89      "default_max_tokens": 8000,
  90      "can_reason": true,
  91      "reasoning_levels": [
  92        "low",
  93        "medium",
  94        "high"
  95      ],
  96      "default_reasoning_effort": "medium",
  97      "supports_attachments": true,
  98      "options": {}
  99    },
 100    {
 101      "id": "anthropic/claude-opus-4",
 102      "name": "Claude Opus 4",
 103      "cost_per_1m_in": 15,
 104      "cost_per_1m_out": 75,
 105      "cost_per_1m_in_cached": 1.5,
 106      "cost_per_1m_out_cached": 18.75,
 107      "context_window": 200000,
 108      "default_max_tokens": 8000,
 109      "can_reason": true,
 110      "reasoning_levels": [
 111        "low",
 112        "medium",
 113        "high"
 114      ],
 115      "default_reasoning_effort": "medium",
 116      "supports_attachments": true,
 117      "options": {}
 118    },
 119    {
 120      "id": "anthropic/claude-opus-4.1",
 121      "name": "Claude Opus 4.1",
 122      "cost_per_1m_in": 15,
 123      "cost_per_1m_out": 75,
 124      "cost_per_1m_in_cached": 1.5,
 125      "cost_per_1m_out_cached": 18.75,
 126      "context_window": 200000,
 127      "default_max_tokens": 8000,
 128      "can_reason": true,
 129      "reasoning_levels": [
 130        "low",
 131        "medium",
 132        "high"
 133      ],
 134      "default_reasoning_effort": "medium",
 135      "supports_attachments": true,
 136      "options": {}
 137    },
 138    {
 139      "id": "anthropic/claude-opus-4.5",
 140      "name": "Claude Opus 4.5",
 141      "cost_per_1m_in": 5,
 142      "cost_per_1m_out": 25,
 143      "cost_per_1m_in_cached": 0.5,
 144      "cost_per_1m_out_cached": 6.25,
 145      "context_window": 200000,
 146      "default_max_tokens": 8000,
 147      "can_reason": true,
 148      "reasoning_levels": [
 149        "low",
 150        "medium",
 151        "high"
 152      ],
 153      "default_reasoning_effort": "medium",
 154      "supports_attachments": true,
 155      "options": {}
 156    },
 157    {
 158      "id": "anthropic/claude-sonnet-4",
 159      "name": "Claude Sonnet 4",
 160      "cost_per_1m_in": 3,
 161      "cost_per_1m_out": 15,
 162      "cost_per_1m_in_cached": 0.3,
 163      "cost_per_1m_out_cached": 3.75,
 164      "context_window": 1000000,
 165      "default_max_tokens": 8000,
 166      "can_reason": true,
 167      "reasoning_levels": [
 168        "low",
 169        "medium",
 170        "high"
 171      ],
 172      "default_reasoning_effort": "medium",
 173      "supports_attachments": true,
 174      "options": {}
 175    },
 176    {
 177      "id": "anthropic/claude-sonnet-4.5",
 178      "name": "Claude Sonnet 4.5",
 179      "cost_per_1m_in": 3,
 180      "cost_per_1m_out": 15,
 181      "cost_per_1m_in_cached": 0.3,
 182      "cost_per_1m_out_cached": 3.75,
 183      "context_window": 1000000,
 184      "default_max_tokens": 8000,
 185      "can_reason": true,
 186      "reasoning_levels": [
 187        "low",
 188        "medium",
 189        "high"
 190      ],
 191      "default_reasoning_effort": "medium",
 192      "supports_attachments": true,
 193      "options": {}
 194    },
 195    {
 196      "id": "openai/codex-mini",
 197      "name": "Codex Mini",
 198      "cost_per_1m_in": 1.5,
 199      "cost_per_1m_out": 6,
 200      "cost_per_1m_in_cached": 0.375,
 201      "cost_per_1m_out_cached": 0,
 202      "context_window": 200000,
 203      "default_max_tokens": 8000,
 204      "can_reason": true,
 205      "reasoning_levels": [
 206        "low",
 207        "medium",
 208        "high"
 209      ],
 210      "default_reasoning_effort": "medium",
 211      "supports_attachments": true,
 212      "options": {}
 213    },
 214    {
 215      "id": "cohere/command-a",
 216      "name": "Command A",
 217      "cost_per_1m_in": 2.5,
 218      "cost_per_1m_out": 10,
 219      "cost_per_1m_in_cached": 0,
 220      "cost_per_1m_out_cached": 0,
 221      "context_window": 256000,
 222      "default_max_tokens": 8000,
 223      "can_reason": false,
 224      "supports_attachments": false,
 225      "options": {}
 226    },
 227    {
 228      "id": "deepseek/deepseek-v3",
 229      "name": "DeepSeek V3 0324",
 230      "cost_per_1m_in": 0.77,
 231      "cost_per_1m_out": 0.77,
 232      "cost_per_1m_in_cached": 0,
 233      "cost_per_1m_out_cached": 0,
 234      "context_window": 163840,
 235      "default_max_tokens": 8000,
 236      "can_reason": false,
 237      "supports_attachments": false,
 238      "options": {}
 239    },
 240    {
 241      "id": "deepseek/deepseek-v3.1-terminus",
 242      "name": "DeepSeek V3.1 Terminus",
 243      "cost_per_1m_in": 0.27,
 244      "cost_per_1m_out": 1,
 245      "cost_per_1m_in_cached": 0,
 246      "cost_per_1m_out_cached": 0,
 247      "context_window": 131072,
 248      "default_max_tokens": 8000,
 249      "can_reason": true,
 250      "reasoning_levels": [
 251        "low",
 252        "medium",
 253        "high"
 254      ],
 255      "default_reasoning_effort": "medium",
 256      "supports_attachments": false,
 257      "options": {}
 258    },
 259    {
 260      "id": "deepseek/deepseek-v3.2-exp",
 261      "name": "DeepSeek V3.2 Exp",
 262      "cost_per_1m_in": 0.27,
 263      "cost_per_1m_out": 0.4,
 264      "cost_per_1m_in_cached": 0,
 265      "cost_per_1m_out_cached": 0,
 266      "context_window": 163840,
 267      "default_max_tokens": 8000,
 268      "can_reason": true,
 269      "reasoning_levels": [
 270        "low",
 271        "medium",
 272        "high"
 273      ],
 274      "default_reasoning_effort": "medium",
 275      "supports_attachments": false,
 276      "options": {}
 277    },
 278    {
 279      "id": "deepseek/deepseek-v3.2-thinking",
 280      "name": "DeepSeek V3.2 Thinking",
 281      "cost_per_1m_in": 0.28,
 282      "cost_per_1m_out": 0.42,
 283      "cost_per_1m_in_cached": 0.028,
 284      "cost_per_1m_out_cached": 0,
 285      "context_window": 128000,
 286      "default_max_tokens": 8000,
 287      "can_reason": true,
 288      "reasoning_levels": [
 289        "low",
 290        "medium",
 291        "high"
 292      ],
 293      "default_reasoning_effort": "medium",
 294      "supports_attachments": false,
 295      "options": {}
 296    },
 297    {
 298      "id": "deepseek/deepseek-v3.1",
 299      "name": "DeepSeek-V3.1",
 300      "cost_per_1m_in": 0.3,
 301      "cost_per_1m_out": 1,
 302      "cost_per_1m_in_cached": 0,
 303      "cost_per_1m_out_cached": 0,
 304      "context_window": 163840,
 305      "default_max_tokens": 8000,
 306      "can_reason": true,
 307      "reasoning_levels": [
 308        "low",
 309        "medium",
 310        "high"
 311      ],
 312      "default_reasoning_effort": "medium",
 313      "supports_attachments": false,
 314      "options": {}
 315    },
 316    {
 317      "id": "mistral/devstral-2",
 318      "name": "Devstral 2",
 319      "cost_per_1m_in": 0,
 320      "cost_per_1m_out": 0,
 321      "cost_per_1m_in_cached": 0,
 322      "cost_per_1m_out_cached": 0,
 323      "context_window": 256000,
 324      "default_max_tokens": 8000,
 325      "can_reason": false,
 326      "supports_attachments": false,
 327      "options": {}
 328    },
 329    {
 330      "id": "mistral/devstral-small",
 331      "name": "Devstral Small 1.1",
 332      "cost_per_1m_in": 0.1,
 333      "cost_per_1m_out": 0.3,
 334      "cost_per_1m_in_cached": 0,
 335      "cost_per_1m_out_cached": 0,
 336      "context_window": 128000,
 337      "default_max_tokens": 8000,
 338      "can_reason": false,
 339      "supports_attachments": false,
 340      "options": {}
 341    },
 342    {
 343      "id": "mistral/devstral-small-2",
 344      "name": "Devstral Small 2",
 345      "cost_per_1m_in": 0,
 346      "cost_per_1m_out": 0,
 347      "cost_per_1m_in_cached": 0,
 348      "cost_per_1m_out_cached": 0,
 349      "context_window": 256000,
 350      "default_max_tokens": 8000,
 351      "can_reason": false,
 352      "supports_attachments": false,
 353      "options": {}
 354    },
 355    {
 356      "id": "zai/glm-4.5-air",
 357      "name": "GLM 4.5 Air",
 358      "cost_per_1m_in": 0.2,
 359      "cost_per_1m_out": 1.1,
 360      "cost_per_1m_in_cached": 0.03,
 361      "cost_per_1m_out_cached": 0,
 362      "context_window": 128000,
 363      "default_max_tokens": 8000,
 364      "can_reason": true,
 365      "reasoning_levels": [
 366        "low",
 367        "medium",
 368        "high"
 369      ],
 370      "default_reasoning_effort": "medium",
 371      "supports_attachments": false,
 372      "options": {}
 373    },
 374    {
 375      "id": "zai/glm-4.5v",
 376      "name": "GLM 4.5V",
 377      "cost_per_1m_in": 0.6,
 378      "cost_per_1m_out": 1.8,
 379      "cost_per_1m_in_cached": 0,
 380      "cost_per_1m_out_cached": 0,
 381      "context_window": 65536,
 382      "default_max_tokens": 8000,
 383      "can_reason": true,
 384      "reasoning_levels": [
 385        "low",
 386        "medium",
 387        "high"
 388      ],
 389      "default_reasoning_effort": "medium",
 390      "supports_attachments": true,
 391      "options": {}
 392    },
 393    {
 394      "id": "zai/glm-4.6",
 395      "name": "GLM 4.6",
 396      "cost_per_1m_in": 0.45,
 397      "cost_per_1m_out": 1.8,
 398      "cost_per_1m_in_cached": 0.11,
 399      "cost_per_1m_out_cached": 0,
 400      "context_window": 200000,
 401      "default_max_tokens": 8000,
 402      "can_reason": true,
 403      "reasoning_levels": [
 404        "low",
 405        "medium",
 406        "high"
 407      ],
 408      "default_reasoning_effort": "medium",
 409      "supports_attachments": false,
 410      "options": {}
 411    },
 412    {
 413      "id": "zai/glm-4.7",
 414      "name": "GLM 4.7",
 415      "cost_per_1m_in": 0.43,
 416      "cost_per_1m_out": 1.75,
 417      "cost_per_1m_in_cached": 0.08,
 418      "cost_per_1m_out_cached": 0,
 419      "context_window": 202752,
 420      "default_max_tokens": 8000,
 421      "can_reason": true,
 422      "reasoning_levels": [
 423        "low",
 424        "medium",
 425        "high"
 426      ],
 427      "default_reasoning_effort": "medium",
 428      "supports_attachments": false,
 429      "options": {}
 430    },
 431    {
 432      "id": "zai/glm-4.7-flashx",
 433      "name": "GLM 4.7 FlashX",
 434      "cost_per_1m_in": 0.06,
 435      "cost_per_1m_out": 0.4,
 436      "cost_per_1m_in_cached": 0.01,
 437      "cost_per_1m_out_cached": 0,
 438      "context_window": 200000,
 439      "default_max_tokens": 8000,
 440      "can_reason": true,
 441      "reasoning_levels": [
 442        "low",
 443        "medium",
 444        "high"
 445      ],
 446      "default_reasoning_effort": "medium",
 447      "supports_attachments": false,
 448      "options": {}
 449    },
 450    {
 451      "id": "zai/glm-4.5",
 452      "name": "GLM-4.5",
 453      "cost_per_1m_in": 0.6,
 454      "cost_per_1m_out": 2.2,
 455      "cost_per_1m_in_cached": 0,
 456      "cost_per_1m_out_cached": 0,
 457      "context_window": 131072,
 458      "default_max_tokens": 8000,
 459      "can_reason": true,
 460      "reasoning_levels": [
 461        "low",
 462        "medium",
 463        "high"
 464      ],
 465      "default_reasoning_effort": "medium",
 466      "supports_attachments": false,
 467      "options": {}
 468    },
 469    {
 470      "id": "zai/glm-4.6v",
 471      "name": "GLM-4.6V",
 472      "cost_per_1m_in": 0.3,
 473      "cost_per_1m_out": 0.9,
 474      "cost_per_1m_in_cached": 0.05,
 475      "cost_per_1m_out_cached": 0,
 476      "context_window": 128000,
 477      "default_max_tokens": 8000,
 478      "can_reason": true,
 479      "reasoning_levels": [
 480        "low",
 481        "medium",
 482        "high"
 483      ],
 484      "default_reasoning_effort": "medium",
 485      "supports_attachments": true,
 486      "options": {}
 487    },
 488    {
 489      "id": "zai/glm-4.6v-flash",
 490      "name": "GLM-4.6V-Flash",
 491      "cost_per_1m_in": 0,
 492      "cost_per_1m_out": 0,
 493      "cost_per_1m_in_cached": 0,
 494      "cost_per_1m_out_cached": 0,
 495      "context_window": 128000,
 496      "default_max_tokens": 8000,
 497      "can_reason": true,
 498      "reasoning_levels": [
 499        "low",
 500        "medium",
 501        "high"
 502      ],
 503      "default_reasoning_effort": "medium",
 504      "supports_attachments": true,
 505      "options": {}
 506    },
 507    {
 508      "id": "openai/gpt-5.1-codex-max",
 509      "name": "GPT 5.1 Codex Max",
 510      "cost_per_1m_in": 1.25,
 511      "cost_per_1m_out": 10,
 512      "cost_per_1m_in_cached": 0.125,
 513      "cost_per_1m_out_cached": 0,
 514      "context_window": 400000,
 515      "default_max_tokens": 8000,
 516      "can_reason": true,
 517      "reasoning_levels": [
 518        "low",
 519        "medium",
 520        "high"
 521      ],
 522      "default_reasoning_effort": "medium",
 523      "supports_attachments": true,
 524      "options": {}
 525    },
 526    {
 527      "id": "openai/gpt-5.1-thinking",
 528      "name": "GPT 5.1 Thinking",
 529      "cost_per_1m_in": 1.25,
 530      "cost_per_1m_out": 10,
 531      "cost_per_1m_in_cached": 0.125,
 532      "cost_per_1m_out_cached": 0,
 533      "context_window": 400000,
 534      "default_max_tokens": 8000,
 535      "can_reason": true,
 536      "reasoning_levels": [
 537        "low",
 538        "medium",
 539        "high"
 540      ],
 541      "default_reasoning_effort": "medium",
 542      "supports_attachments": true,
 543      "options": {}
 544    },
 545    {
 546      "id": "openai/gpt-5.2-pro",
 547      "name": "GPT 5.2 Pro",
 548      "cost_per_1m_in": 21,
 549      "cost_per_1m_out": 168,
 550      "cost_per_1m_in_cached": 0,
 551      "cost_per_1m_out_cached": 0,
 552      "context_window": 400000,
 553      "default_max_tokens": 8000,
 554      "can_reason": true,
 555      "reasoning_levels": [
 556        "low",
 557        "medium",
 558        "high"
 559      ],
 560      "default_reasoning_effort": "medium",
 561      "supports_attachments": true,
 562      "options": {}
 563    },
 564    {
 565      "id": "openai/gpt-4-turbo",
 566      "name": "GPT-4 Turbo",
 567      "cost_per_1m_in": 10,
 568      "cost_per_1m_out": 30,
 569      "cost_per_1m_in_cached": 0,
 570      "cost_per_1m_out_cached": 0,
 571      "context_window": 128000,
 572      "default_max_tokens": 4096,
 573      "can_reason": false,
 574      "supports_attachments": true,
 575      "options": {}
 576    },
 577    {
 578      "id": "openai/gpt-4.1",
 579      "name": "GPT-4.1",
 580      "cost_per_1m_in": 2,
 581      "cost_per_1m_out": 8,
 582      "cost_per_1m_in_cached": 0.5,
 583      "cost_per_1m_out_cached": 0,
 584      "context_window": 1047576,
 585      "default_max_tokens": 8000,
 586      "can_reason": false,
 587      "supports_attachments": true,
 588      "options": {}
 589    },
 590    {
 591      "id": "openai/gpt-4.1-mini",
 592      "name": "GPT-4.1 mini",
 593      "cost_per_1m_in": 0.4,
 594      "cost_per_1m_out": 1.6,
 595      "cost_per_1m_in_cached": 0.1,
 596      "cost_per_1m_out_cached": 0,
 597      "context_window": 1047576,
 598      "default_max_tokens": 8000,
 599      "can_reason": false,
 600      "supports_attachments": true,
 601      "options": {}
 602    },
 603    {
 604      "id": "openai/gpt-4.1-nano",
 605      "name": "GPT-4.1 nano",
 606      "cost_per_1m_in": 0.1,
 607      "cost_per_1m_out": 0.4,
 608      "cost_per_1m_in_cached": 0.025,
 609      "cost_per_1m_out_cached": 0,
 610      "context_window": 1047576,
 611      "default_max_tokens": 8000,
 612      "can_reason": false,
 613      "supports_attachments": true,
 614      "options": {}
 615    },
 616    {
 617      "id": "openai/gpt-4o",
 618      "name": "GPT-4o",
 619      "cost_per_1m_in": 2.5,
 620      "cost_per_1m_out": 10,
 621      "cost_per_1m_in_cached": 1.25,
 622      "cost_per_1m_out_cached": 0,
 623      "context_window": 128000,
 624      "default_max_tokens": 8000,
 625      "can_reason": false,
 626      "supports_attachments": true,
 627      "options": {}
 628    },
 629    {
 630      "id": "openai/gpt-4o-mini",
 631      "name": "GPT-4o mini",
 632      "cost_per_1m_in": 0.15,
 633      "cost_per_1m_out": 0.6,
 634      "cost_per_1m_in_cached": 0.075,
 635      "cost_per_1m_out_cached": 0,
 636      "context_window": 128000,
 637      "default_max_tokens": 8000,
 638      "can_reason": false,
 639      "supports_attachments": true,
 640      "options": {}
 641    },
 642    {
 643      "id": "openai/gpt-5",
 644      "name": "GPT-5",
 645      "cost_per_1m_in": 1.25,
 646      "cost_per_1m_out": 10,
 647      "cost_per_1m_in_cached": 0.13,
 648      "cost_per_1m_out_cached": 0,
 649      "context_window": 400000,
 650      "default_max_tokens": 8000,
 651      "can_reason": true,
 652      "reasoning_levels": [
 653        "low",
 654        "medium",
 655        "high"
 656      ],
 657      "default_reasoning_effort": "medium",
 658      "supports_attachments": true,
 659      "options": {}
 660    },
 661    {
 662      "id": "openai/gpt-5-chat",
 663      "name": "GPT-5 Chat",
 664      "cost_per_1m_in": 1.25,
 665      "cost_per_1m_out": 10,
 666      "cost_per_1m_in_cached": 0.125,
 667      "cost_per_1m_out_cached": 0,
 668      "context_window": 128000,
 669      "default_max_tokens": 8000,
 670      "can_reason": true,
 671      "reasoning_levels": [
 672        "low",
 673        "medium",
 674        "high"
 675      ],
 676      "default_reasoning_effort": "medium",
 677      "supports_attachments": true,
 678      "options": {}
 679    },
 680    {
 681      "id": "openai/gpt-5-mini",
 682      "name": "GPT-5 mini",
 683      "cost_per_1m_in": 0.25,
 684      "cost_per_1m_out": 2,
 685      "cost_per_1m_in_cached": 0.03,
 686      "cost_per_1m_out_cached": 0,
 687      "context_window": 400000,
 688      "default_max_tokens": 8000,
 689      "can_reason": true,
 690      "reasoning_levels": [
 691        "low",
 692        "medium",
 693        "high"
 694      ],
 695      "default_reasoning_effort": "medium",
 696      "supports_attachments": true,
 697      "options": {}
 698    },
 699    {
 700      "id": "openai/gpt-5-nano",
 701      "name": "GPT-5 nano",
 702      "cost_per_1m_in": 0.05,
 703      "cost_per_1m_out": 0.4,
 704      "cost_per_1m_in_cached": 0.01,
 705      "cost_per_1m_out_cached": 0,
 706      "context_window": 400000,
 707      "default_max_tokens": 8000,
 708      "can_reason": true,
 709      "reasoning_levels": [
 710        "low",
 711        "medium",
 712        "high"
 713      ],
 714      "default_reasoning_effort": "medium",
 715      "supports_attachments": true,
 716      "options": {}
 717    },
 718    {
 719      "id": "openai/gpt-5-pro",
 720      "name": "GPT-5 pro",
 721      "cost_per_1m_in": 15,
 722      "cost_per_1m_out": 120,
 723      "cost_per_1m_in_cached": 0,
 724      "cost_per_1m_out_cached": 0,
 725      "context_window": 400000,
 726      "default_max_tokens": 8000,
 727      "can_reason": true,
 728      "reasoning_levels": [
 729        "low",
 730        "medium",
 731        "high"
 732      ],
 733      "default_reasoning_effort": "medium",
 734      "supports_attachments": true,
 735      "options": {}
 736    },
 737    {
 738      "id": "openai/gpt-5-codex",
 739      "name": "GPT-5-Codex",
 740      "cost_per_1m_in": 1.25,
 741      "cost_per_1m_out": 10,
 742      "cost_per_1m_in_cached": 0.13,
 743      "cost_per_1m_out_cached": 0,
 744      "context_window": 400000,
 745      "default_max_tokens": 8000,
 746      "can_reason": true,
 747      "reasoning_levels": [
 748        "low",
 749        "medium",
 750        "high"
 751      ],
 752      "default_reasoning_effort": "medium",
 753      "supports_attachments": true,
 754      "options": {}
 755    },
 756    {
 757      "id": "openai/gpt-5.1-codex-mini",
 758      "name": "GPT-5.1 Codex mini",
 759      "cost_per_1m_in": 0.25,
 760      "cost_per_1m_out": 2,
 761      "cost_per_1m_in_cached": 0.025,
 762      "cost_per_1m_out_cached": 0,
 763      "context_window": 400000,
 764      "default_max_tokens": 8000,
 765      "can_reason": true,
 766      "reasoning_levels": [
 767        "low",
 768        "medium",
 769        "high"
 770      ],
 771      "default_reasoning_effort": "medium",
 772      "supports_attachments": true,
 773      "options": {}
 774    },
 775    {
 776      "id": "openai/gpt-5.1-instant",
 777      "name": "GPT-5.1 Instant",
 778      "cost_per_1m_in": 1.25,
 779      "cost_per_1m_out": 10,
 780      "cost_per_1m_in_cached": 0.125,
 781      "cost_per_1m_out_cached": 0,
 782      "context_window": 128000,
 783      "default_max_tokens": 8000,
 784      "can_reason": true,
 785      "reasoning_levels": [
 786        "low",
 787        "medium",
 788        "high"
 789      ],
 790      "default_reasoning_effort": "medium",
 791      "supports_attachments": true,
 792      "options": {}
 793    },
 794    {
 795      "id": "openai/gpt-5.1-codex",
 796      "name": "GPT-5.1-Codex",
 797      "cost_per_1m_in": 1.25,
 798      "cost_per_1m_out": 10,
 799      "cost_per_1m_in_cached": 0.125,
 800      "cost_per_1m_out_cached": 0,
 801      "context_window": 400000,
 802      "default_max_tokens": 8000,
 803      "can_reason": true,
 804      "reasoning_levels": [
 805        "low",
 806        "medium",
 807        "high"
 808      ],
 809      "default_reasoning_effort": "medium",
 810      "supports_attachments": true,
 811      "options": {}
 812    },
 813    {
 814      "id": "openai/gpt-5.2",
 815      "name": "GPT-5.2",
 816      "cost_per_1m_in": 1.75,
 817      "cost_per_1m_out": 14,
 818      "cost_per_1m_in_cached": 0.175,
 819      "cost_per_1m_out_cached": 0,
 820      "context_window": 400000,
 821      "default_max_tokens": 8000,
 822      "can_reason": true,
 823      "reasoning_levels": [
 824        "low",
 825        "medium",
 826        "high"
 827      ],
 828      "default_reasoning_effort": "medium",
 829      "supports_attachments": true,
 830      "options": {}
 831    },
 832    {
 833      "id": "openai/gpt-5.2-chat",
 834      "name": "GPT-5.2 Chat",
 835      "cost_per_1m_in": 1.75,
 836      "cost_per_1m_out": 14,
 837      "cost_per_1m_in_cached": 0.175,
 838      "cost_per_1m_out_cached": 0,
 839      "context_window": 128000,
 840      "default_max_tokens": 8000,
 841      "can_reason": true,
 842      "reasoning_levels": [
 843        "low",
 844        "medium",
 845        "high"
 846      ],
 847      "default_reasoning_effort": "medium",
 848      "supports_attachments": true,
 849      "options": {}
 850    },
 851    {
 852      "id": "openai/gpt-5.2-codex",
 853      "name": "GPT-5.2-Codex",
 854      "cost_per_1m_in": 1.75,
 855      "cost_per_1m_out": 14,
 856      "cost_per_1m_in_cached": 0.175,
 857      "cost_per_1m_out_cached": 0,
 858      "context_window": 400000,
 859      "default_max_tokens": 8000,
 860      "can_reason": true,
 861      "reasoning_levels": [
 862        "low",
 863        "medium",
 864        "high"
 865      ],
 866      "default_reasoning_effort": "medium",
 867      "supports_attachments": true,
 868      "options": {}
 869    },
 870    {
 871      "id": "google/gemini-2.5-flash",
 872      "name": "Gemini 2.5 Flash",
 873      "cost_per_1m_in": 0.3,
 874      "cost_per_1m_out": 2.5,
 875      "cost_per_1m_in_cached": 0,
 876      "cost_per_1m_out_cached": 0,
 877      "context_window": 1000000,
 878      "default_max_tokens": 8000,
 879      "can_reason": true,
 880      "reasoning_levels": [
 881        "low",
 882        "medium",
 883        "high"
 884      ],
 885      "default_reasoning_effort": "medium",
 886      "supports_attachments": false,
 887      "options": {}
 888    },
 889    {
 890      "id": "google/gemini-2.5-flash-lite",
 891      "name": "Gemini 2.5 Flash Lite",
 892      "cost_per_1m_in": 0.1,
 893      "cost_per_1m_out": 0.4,
 894      "cost_per_1m_in_cached": 0.01,
 895      "cost_per_1m_out_cached": 0,
 896      "context_window": 1048576,
 897      "default_max_tokens": 8000,
 898      "can_reason": true,
 899      "reasoning_levels": [
 900        "low",
 901        "medium",
 902        "high"
 903      ],
 904      "default_reasoning_effort": "medium",
 905      "supports_attachments": true,
 906      "options": {}
 907    },
 908    {
 909      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 910      "name": "Gemini 2.5 Flash Lite Preview 09-2025",
 911      "cost_per_1m_in": 0.1,
 912      "cost_per_1m_out": 0.4,
 913      "cost_per_1m_in_cached": 0.01,
 914      "cost_per_1m_out_cached": 0,
 915      "context_window": 1048576,
 916      "default_max_tokens": 8000,
 917      "can_reason": true,
 918      "reasoning_levels": [
 919        "low",
 920        "medium",
 921        "high"
 922      ],
 923      "default_reasoning_effort": "medium",
 924      "supports_attachments": true,
 925      "options": {}
 926    },
 927    {
 928      "id": "google/gemini-2.5-flash-preview-09-2025",
 929      "name": "Gemini 2.5 Flash Preview 09-2025",
 930      "cost_per_1m_in": 0.3,
 931      "cost_per_1m_out": 2.5,
 932      "cost_per_1m_in_cached": 0.03,
 933      "cost_per_1m_out_cached": 0,
 934      "context_window": 1000000,
 935      "default_max_tokens": 8000,
 936      "can_reason": true,
 937      "reasoning_levels": [
 938        "low",
 939        "medium",
 940        "high"
 941      ],
 942      "default_reasoning_effort": "medium",
 943      "supports_attachments": true,
 944      "options": {}
 945    },
 946    {
 947      "id": "google/gemini-2.5-pro",
 948      "name": "Gemini 2.5 Pro",
 949      "cost_per_1m_in": 1.25,
 950      "cost_per_1m_out": 10,
 951      "cost_per_1m_in_cached": 0,
 952      "cost_per_1m_out_cached": 0,
 953      "context_window": 1048576,
 954      "default_max_tokens": 8000,
 955      "can_reason": true,
 956      "reasoning_levels": [
 957        "low",
 958        "medium",
 959        "high"
 960      ],
 961      "default_reasoning_effort": "medium",
 962      "supports_attachments": false,
 963      "options": {}
 964    },
 965    {
 966      "id": "google/gemini-3-flash",
 967      "name": "Gemini 3 Flash",
 968      "cost_per_1m_in": 0.5,
 969      "cost_per_1m_out": 3,
 970      "cost_per_1m_in_cached": 0.05,
 971      "cost_per_1m_out_cached": 0,
 972      "context_window": 1000000,
 973      "default_max_tokens": 8000,
 974      "can_reason": true,
 975      "reasoning_levels": [
 976        "low",
 977        "medium",
 978        "high"
 979      ],
 980      "default_reasoning_effort": "medium",
 981      "supports_attachments": true,
 982      "options": {}
 983    },
 984    {
 985      "id": "google/gemini-3-pro-preview",
 986      "name": "Gemini 3 Pro Preview",
 987      "cost_per_1m_in": 2,
 988      "cost_per_1m_out": 12,
 989      "cost_per_1m_in_cached": 0.2,
 990      "cost_per_1m_out_cached": 0,
 991      "context_window": 1000000,
 992      "default_max_tokens": 8000,
 993      "can_reason": true,
 994      "reasoning_levels": [
 995        "low",
 996        "medium",
 997        "high"
 998      ],
 999      "default_reasoning_effort": "medium",
1000      "supports_attachments": true,
1001      "options": {}
1002    },
1003    {
1004      "id": "xai/grok-2-vision",
1005      "name": "Grok 2 Vision",
1006      "cost_per_1m_in": 2,
1007      "cost_per_1m_out": 10,
1008      "cost_per_1m_in_cached": 0,
1009      "cost_per_1m_out_cached": 0,
1010      "context_window": 32768,
1011      "default_max_tokens": 8000,
1012      "can_reason": false,
1013      "supports_attachments": true,
1014      "options": {}
1015    },
1016    {
1017      "id": "xai/grok-3",
1018      "name": "Grok 3 Beta",
1019      "cost_per_1m_in": 3,
1020      "cost_per_1m_out": 15,
1021      "cost_per_1m_in_cached": 0,
1022      "cost_per_1m_out_cached": 0,
1023      "context_window": 131072,
1024      "default_max_tokens": 8000,
1025      "can_reason": false,
1026      "supports_attachments": false,
1027      "options": {}
1028    },
1029    {
1030      "id": "xai/grok-3-fast",
1031      "name": "Grok 3 Fast Beta",
1032      "cost_per_1m_in": 5,
1033      "cost_per_1m_out": 25,
1034      "cost_per_1m_in_cached": 0,
1035      "cost_per_1m_out_cached": 0,
1036      "context_window": 131072,
1037      "default_max_tokens": 8000,
1038      "can_reason": false,
1039      "supports_attachments": false,
1040      "options": {}
1041    },
1042    {
1043      "id": "xai/grok-3-mini",
1044      "name": "Grok 3 Mini Beta",
1045      "cost_per_1m_in": 0.3,
1046      "cost_per_1m_out": 0.5,
1047      "cost_per_1m_in_cached": 0,
1048      "cost_per_1m_out_cached": 0,
1049      "context_window": 131072,
1050      "default_max_tokens": 8000,
1051      "can_reason": false,
1052      "supports_attachments": false,
1053      "options": {}
1054    },
1055    {
1056      "id": "xai/grok-3-mini-fast",
1057      "name": "Grok 3 Mini Fast Beta",
1058      "cost_per_1m_in": 0.6,
1059      "cost_per_1m_out": 4,
1060      "cost_per_1m_in_cached": 0,
1061      "cost_per_1m_out_cached": 0,
1062      "context_window": 131072,
1063      "default_max_tokens": 8000,
1064      "can_reason": false,
1065      "supports_attachments": false,
1066      "options": {}
1067    },
1068    {
1069      "id": "xai/grok-4",
1070      "name": "Grok 4",
1071      "cost_per_1m_in": 3,
1072      "cost_per_1m_out": 15,
1073      "cost_per_1m_in_cached": 0,
1074      "cost_per_1m_out_cached": 0,
1075      "context_window": 256000,
1076      "default_max_tokens": 8000,
1077      "can_reason": true,
1078      "reasoning_levels": [
1079        "low",
1080        "medium",
1081        "high"
1082      ],
1083      "default_reasoning_effort": "medium",
1084      "supports_attachments": true,
1085      "options": {}
1086    },
1087    {
1088      "id": "xai/grok-4-fast-non-reasoning",
1089      "name": "Grok 4 Fast Non-Reasoning",
1090      "cost_per_1m_in": 0.2,
1091      "cost_per_1m_out": 0.5,
1092      "cost_per_1m_in_cached": 0.05,
1093      "cost_per_1m_out_cached": 0,
1094      "context_window": 2000000,
1095      "default_max_tokens": 8000,
1096      "can_reason": false,
1097      "supports_attachments": false,
1098      "options": {}
1099    },
1100    {
1101      "id": "xai/grok-4-fast-reasoning",
1102      "name": "Grok 4 Fast Reasoning",
1103      "cost_per_1m_in": 0.2,
1104      "cost_per_1m_out": 0.5,
1105      "cost_per_1m_in_cached": 0.05,
1106      "cost_per_1m_out_cached": 0,
1107      "context_window": 2000000,
1108      "default_max_tokens": 8000,
1109      "can_reason": true,
1110      "reasoning_levels": [
1111        "low",
1112        "medium",
1113        "high"
1114      ],
1115      "default_reasoning_effort": "medium",
1116      "supports_attachments": false,
1117      "options": {}
1118    },
1119    {
1120      "id": "xai/grok-4.1-fast-non-reasoning",
1121      "name": "Grok 4.1 Fast Non-Reasoning",
1122      "cost_per_1m_in": 0.2,
1123      "cost_per_1m_out": 0.5,
1124      "cost_per_1m_in_cached": 0.05,
1125      "cost_per_1m_out_cached": 0,
1126      "context_window": 2000000,
1127      "default_max_tokens": 8000,
1128      "can_reason": false,
1129      "supports_attachments": false,
1130      "options": {}
1131    },
1132    {
1133      "id": "xai/grok-4.1-fast-reasoning",
1134      "name": "Grok 4.1 Fast Reasoning",
1135      "cost_per_1m_in": 0.2,
1136      "cost_per_1m_out": 0.5,
1137      "cost_per_1m_in_cached": 0.05,
1138      "cost_per_1m_out_cached": 0,
1139      "context_window": 2000000,
1140      "default_max_tokens": 8000,
1141      "can_reason": true,
1142      "reasoning_levels": [
1143        "low",
1144        "medium",
1145        "high"
1146      ],
1147      "default_reasoning_effort": "medium",
1148      "supports_attachments": false,
1149      "options": {}
1150    },
1151    {
1152      "id": "xai/grok-code-fast-1",
1153      "name": "Grok Code Fast 1",
1154      "cost_per_1m_in": 0.2,
1155      "cost_per_1m_out": 1.5,
1156      "cost_per_1m_in_cached": 0.02,
1157      "cost_per_1m_out_cached": 0,
1158      "context_window": 256000,
1159      "default_max_tokens": 8000,
1160      "can_reason": true,
1161      "reasoning_levels": [
1162        "low",
1163        "medium",
1164        "high"
1165      ],
1166      "default_reasoning_effort": "medium",
1167      "supports_attachments": false,
1168      "options": {}
1169    },
1170    {
1171      "id": "prime-intellect/intellect-3",
1172      "name": "INTELLECT 3",
1173      "cost_per_1m_in": 0.2,
1174      "cost_per_1m_out": 1.1,
1175      "cost_per_1m_in_cached": 0,
1176      "cost_per_1m_out_cached": 0,
1177      "context_window": 131072,
1178      "default_max_tokens": 8000,
1179      "can_reason": true,
1180      "reasoning_levels": [
1181        "low",
1182        "medium",
1183        "high"
1184      ],
1185      "default_reasoning_effort": "medium",
1186      "supports_attachments": false,
1187      "options": {}
1188    },
1189    {
1190      "id": "moonshotai/kimi-k2",
1191      "name": "Kimi K2",
1192      "cost_per_1m_in": 0.5,
1193      "cost_per_1m_out": 2,
1194      "cost_per_1m_in_cached": 0,
1195      "cost_per_1m_out_cached": 0,
1196      "context_window": 131072,
1197      "default_max_tokens": 8000,
1198      "can_reason": false,
1199      "supports_attachments": false,
1200      "options": {}
1201    },
1202    {
1203      "id": "moonshotai/kimi-k2-thinking",
1204      "name": "Kimi K2 Thinking",
1205      "cost_per_1m_in": 0.47,
1206      "cost_per_1m_out": 2,
1207      "cost_per_1m_in_cached": 0.141,
1208      "cost_per_1m_out_cached": 0,
1209      "context_window": 216144,
1210      "default_max_tokens": 8000,
1211      "can_reason": true,
1212      "reasoning_levels": [
1213        "low",
1214        "medium",
1215        "high"
1216      ],
1217      "default_reasoning_effort": "medium",
1218      "supports_attachments": false,
1219      "options": {}
1220    },
1221    {
1222      "id": "moonshotai/kimi-k2-thinking-turbo",
1223      "name": "Kimi K2 Thinking Turbo",
1224      "cost_per_1m_in": 1.15,
1225      "cost_per_1m_out": 8,
1226      "cost_per_1m_in_cached": 0.15,
1227      "cost_per_1m_out_cached": 0,
1228      "context_window": 262114,
1229      "default_max_tokens": 8000,
1230      "can_reason": true,
1231      "reasoning_levels": [
1232        "low",
1233        "medium",
1234        "high"
1235      ],
1236      "default_reasoning_effort": "medium",
1237      "supports_attachments": false,
1238      "options": {}
1239    },
1240    {
1241      "id": "moonshotai/kimi-k2-turbo",
1242      "name": "Kimi K2 Turbo",
1243      "cost_per_1m_in": 2.4,
1244      "cost_per_1m_out": 10,
1245      "cost_per_1m_in_cached": 0,
1246      "cost_per_1m_out_cached": 0,
1247      "context_window": 256000,
1248      "default_max_tokens": 8000,
1249      "can_reason": false,
1250      "supports_attachments": false,
1251      "options": {}
1252    },
1253    {
1254      "id": "meta/llama-3.1-70b",
1255      "name": "Llama 3.1 70B Instruct",
1256      "cost_per_1m_in": 0.4,
1257      "cost_per_1m_out": 0.4,
1258      "cost_per_1m_in_cached": 0,
1259      "cost_per_1m_out_cached": 0,
1260      "context_window": 131072,
1261      "default_max_tokens": 8000,
1262      "can_reason": false,
1263      "supports_attachments": false,
1264      "options": {}
1265    },
1266    {
1267      "id": "meta/llama-3.1-8b",
1268      "name": "Llama 3.1 8B Instruct",
1269      "cost_per_1m_in": 0.03,
1270      "cost_per_1m_out": 0.05,
1271      "cost_per_1m_in_cached": 0,
1272      "cost_per_1m_out_cached": 0,
1273      "context_window": 131072,
1274      "default_max_tokens": 8000,
1275      "can_reason": false,
1276      "supports_attachments": false,
1277      "options": {}
1278    },
1279    {
1280      "id": "meta/llama-3.2-11b",
1281      "name": "Llama 3.2 11B Vision Instruct",
1282      "cost_per_1m_in": 0.16,
1283      "cost_per_1m_out": 0.16,
1284      "cost_per_1m_in_cached": 0,
1285      "cost_per_1m_out_cached": 0,
1286      "context_window": 128000,
1287      "default_max_tokens": 8000,
1288      "can_reason": false,
1289      "supports_attachments": true,
1290      "options": {}
1291    },
1292    {
1293      "id": "meta/llama-3.2-90b",
1294      "name": "Llama 3.2 90B Vision Instruct",
1295      "cost_per_1m_in": 0.72,
1296      "cost_per_1m_out": 0.72,
1297      "cost_per_1m_in_cached": 0,
1298      "cost_per_1m_out_cached": 0,
1299      "context_window": 128000,
1300      "default_max_tokens": 8000,
1301      "can_reason": false,
1302      "supports_attachments": true,
1303      "options": {}
1304    },
1305    {
1306      "id": "meta/llama-3.3-70b",
1307      "name": "Llama 3.3 70B Instruct",
1308      "cost_per_1m_in": 0.72,
1309      "cost_per_1m_out": 0.72,
1310      "cost_per_1m_in_cached": 0,
1311      "cost_per_1m_out_cached": 0,
1312      "context_window": 128000,
1313      "default_max_tokens": 8000,
1314      "can_reason": false,
1315      "supports_attachments": false,
1316      "options": {}
1317    },
1318    {
1319      "id": "meta/llama-4-maverick",
1320      "name": "Llama 4 Maverick 17B Instruct",
1321      "cost_per_1m_in": 0.15,
1322      "cost_per_1m_out": 0.6,
1323      "cost_per_1m_in_cached": 0,
1324      "cost_per_1m_out_cached": 0,
1325      "context_window": 131072,
1326      "default_max_tokens": 8000,
1327      "can_reason": false,
1328      "supports_attachments": true,
1329      "options": {}
1330    },
1331    {
1332      "id": "meta/llama-4-scout",
1333      "name": "Llama 4 Scout 17B Instruct",
1334      "cost_per_1m_in": 0.08,
1335      "cost_per_1m_out": 0.3,
1336      "cost_per_1m_in_cached": 0,
1337      "cost_per_1m_out_cached": 0,
1338      "context_window": 131072,
1339      "default_max_tokens": 8000,
1340      "can_reason": false,
1341      "supports_attachments": true,
1342      "options": {}
1343    },
1344    {
1345      "id": "meituan/longcat-flash-chat",
1346      "name": "LongCat Flash Chat",
1347      "cost_per_1m_in": 0,
1348      "cost_per_1m_out": 0,
1349      "cost_per_1m_in_cached": 0,
1350      "cost_per_1m_out_cached": 0,
1351      "context_window": 128000,
1352      "default_max_tokens": 8000,
1353      "can_reason": false,
1354      "supports_attachments": false,
1355      "options": {}
1356    },
1357    {
1358      "id": "meituan/longcat-flash-thinking",
1359      "name": "LongCat Flash Thinking",
1360      "cost_per_1m_in": 0.15,
1361      "cost_per_1m_out": 1.5,
1362      "cost_per_1m_in_cached": 0,
1363      "cost_per_1m_out_cached": 0,
1364      "context_window": 128000,
1365      "default_max_tokens": 8000,
1366      "can_reason": true,
1367      "reasoning_levels": [
1368        "low",
1369        "medium",
1370        "high"
1371      ],
1372      "default_reasoning_effort": "medium",
1373      "supports_attachments": false,
1374      "options": {}
1375    },
1376    {
1377      "id": "inception/mercury-coder-small",
1378      "name": "Mercury Coder Small Beta",
1379      "cost_per_1m_in": 0.25,
1380      "cost_per_1m_out": 1,
1381      "cost_per_1m_in_cached": 0,
1382      "cost_per_1m_out_cached": 0,
1383      "context_window": 32000,
1384      "default_max_tokens": 8000,
1385      "can_reason": false,
1386      "supports_attachments": false,
1387      "options": {}
1388    },
1389    {
1390      "id": "xiaomi/mimo-v2-flash",
1391      "name": "MiMo V2 Flash",
1392      "cost_per_1m_in": 0.09,
1393      "cost_per_1m_out": 0.29,
1394      "cost_per_1m_in_cached": 0,
1395      "cost_per_1m_out_cached": 0,
1396      "context_window": 262144,
1397      "default_max_tokens": 8000,
1398      "can_reason": true,
1399      "reasoning_levels": [
1400        "low",
1401        "medium",
1402        "high"
1403      ],
1404      "default_reasoning_effort": "medium",
1405      "supports_attachments": false,
1406      "options": {}
1407    },
1408    {
1409      "id": "minimax/minimax-m2",
1410      "name": "MiniMax M2",
1411      "cost_per_1m_in": 0.27,
1412      "cost_per_1m_out": 1.15,
1413      "cost_per_1m_in_cached": 0,
1414      "cost_per_1m_out_cached": 0,
1415      "context_window": 262114,
1416      "default_max_tokens": 8000,
1417      "can_reason": true,
1418      "reasoning_levels": [
1419        "low",
1420        "medium",
1421        "high"
1422      ],
1423      "default_reasoning_effort": "medium",
1424      "supports_attachments": false,
1425      "options": {}
1426    },
1427    {
1428      "id": "minimax/minimax-m2.1",
1429      "name": "MiniMax M2.1",
1430      "cost_per_1m_in": 0.28,
1431      "cost_per_1m_out": 1.2,
1432      "cost_per_1m_in_cached": 0.14,
1433      "cost_per_1m_out_cached": 0,
1434      "context_window": 196608,
1435      "default_max_tokens": 8000,
1436      "can_reason": true,
1437      "reasoning_levels": [
1438        "low",
1439        "medium",
1440        "high"
1441      ],
1442      "default_reasoning_effort": "medium",
1443      "supports_attachments": false,
1444      "options": {}
1445    },
1446    {
1447      "id": "minimax/minimax-m2.1-lightning",
1448      "name": "MiniMax M2.1 Lightning",
1449      "cost_per_1m_in": 0.3,
1450      "cost_per_1m_out": 2.4,
1451      "cost_per_1m_in_cached": 0.03,
1452      "cost_per_1m_out_cached": 0.375,
1453      "context_window": 204800,
1454      "default_max_tokens": 8000,
1455      "can_reason": true,
1456      "reasoning_levels": [
1457        "low",
1458        "medium",
1459        "high"
1460      ],
1461      "default_reasoning_effort": "medium",
1462      "supports_attachments": false,
1463      "options": {}
1464    },
1465    {
1466      "id": "mistral/ministral-3b",
1467      "name": "Ministral 3B",
1468      "cost_per_1m_in": 0.04,
1469      "cost_per_1m_out": 0.04,
1470      "cost_per_1m_in_cached": 0,
1471      "cost_per_1m_out_cached": 0,
1472      "context_window": 128000,
1473      "default_max_tokens": 4000,
1474      "can_reason": false,
1475      "supports_attachments": false,
1476      "options": {}
1477    },
1478    {
1479      "id": "mistral/ministral-8b",
1480      "name": "Ministral 8B",
1481      "cost_per_1m_in": 0.1,
1482      "cost_per_1m_out": 0.1,
1483      "cost_per_1m_in_cached": 0,
1484      "cost_per_1m_out_cached": 0,
1485      "context_window": 128000,
1486      "default_max_tokens": 4000,
1487      "can_reason": false,
1488      "supports_attachments": false,
1489      "options": {}
1490    },
1491    {
1492      "id": "mistral/codestral",
1493      "name": "Mistral Codestral",
1494      "cost_per_1m_in": 0.3,
1495      "cost_per_1m_out": 0.9,
1496      "cost_per_1m_in_cached": 0,
1497      "cost_per_1m_out_cached": 0,
1498      "context_window": 128000,
1499      "default_max_tokens": 4000,
1500      "can_reason": false,
1501      "supports_attachments": false,
1502      "options": {}
1503    },
1504    {
1505      "id": "mistral/mistral-medium",
1506      "name": "Mistral Medium 3.1",
1507      "cost_per_1m_in": 0.4,
1508      "cost_per_1m_out": 2,
1509      "cost_per_1m_in_cached": 0,
1510      "cost_per_1m_out_cached": 0,
1511      "context_window": 128000,
1512      "default_max_tokens": 8000,
1513      "can_reason": false,
1514      "supports_attachments": true,
1515      "options": {}
1516    },
1517    {
1518      "id": "mistral/mistral-small",
1519      "name": "Mistral Small",
1520      "cost_per_1m_in": 0.1,
1521      "cost_per_1m_out": 0.3,
1522      "cost_per_1m_in_cached": 0,
1523      "cost_per_1m_out_cached": 0,
1524      "context_window": 32000,
1525      "default_max_tokens": 4000,
1526      "can_reason": false,
1527      "supports_attachments": true,
1528      "options": {}
1529    },
1530    {
1531      "id": "nvidia/nemotron-nano-12b-v2-vl",
1532      "name": "Nvidia Nemotron Nano 12B V2 VL",
1533      "cost_per_1m_in": 0.2,
1534      "cost_per_1m_out": 0.6,
1535      "cost_per_1m_in_cached": 0,
1536      "cost_per_1m_out_cached": 0,
1537      "context_window": 131072,
1538      "default_max_tokens": 8000,
1539      "can_reason": true,
1540      "reasoning_levels": [
1541        "low",
1542        "medium",
1543        "high"
1544      ],
1545      "default_reasoning_effort": "medium",
1546      "supports_attachments": true,
1547      "options": {}
1548    },
1549    {
1550      "id": "nvidia/nemotron-nano-9b-v2",
1551      "name": "Nvidia Nemotron Nano 9B V2",
1552      "cost_per_1m_in": 0.04,
1553      "cost_per_1m_out": 0.16,
1554      "cost_per_1m_in_cached": 0,
1555      "cost_per_1m_out_cached": 0,
1556      "context_window": 131072,
1557      "default_max_tokens": 8000,
1558      "can_reason": true,
1559      "reasoning_levels": [
1560        "low",
1561        "medium",
1562        "high"
1563      ],
1564      "default_reasoning_effort": "medium",
1565      "supports_attachments": false,
1566      "options": {}
1567    },
1568    {
1569      "id": "mistral/pixtral-12b",
1570      "name": "Pixtral 12B 2409",
1571      "cost_per_1m_in": 0.15,
1572      "cost_per_1m_out": 0.15,
1573      "cost_per_1m_in_cached": 0,
1574      "cost_per_1m_out_cached": 0,
1575      "context_window": 128000,
1576      "default_max_tokens": 4000,
1577      "can_reason": false,
1578      "supports_attachments": true,
1579      "options": {}
1580    },
1581    {
1582      "id": "mistral/pixtral-large",
1583      "name": "Pixtral Large",
1584      "cost_per_1m_in": 2,
1585      "cost_per_1m_out": 6,
1586      "cost_per_1m_in_cached": 0,
1587      "cost_per_1m_out_cached": 0,
1588      "context_window": 128000,
1589      "default_max_tokens": 4000,
1590      "can_reason": false,
1591      "supports_attachments": true,
1592      "options": {}
1593    },
1594    {
1595      "id": "alibaba/qwen3-coder-30b-a3b",
1596      "name": "Qwen 3 Coder 30B A3B Instruct",
1597      "cost_per_1m_in": 0.07,
1598      "cost_per_1m_out": 0.27,
1599      "cost_per_1m_in_cached": 0,
1600      "cost_per_1m_out_cached": 0,
1601      "context_window": 160000,
1602      "default_max_tokens": 8000,
1603      "can_reason": true,
1604      "reasoning_levels": [
1605        "low",
1606        "medium",
1607        "high"
1608      ],
1609      "default_reasoning_effort": "medium",
1610      "supports_attachments": false,
1611      "options": {}
1612    },
1613    {
1614      "id": "alibaba/qwen-3-32b",
1615      "name": "Qwen3 32B",
1616      "cost_per_1m_in": 0.1,
1617      "cost_per_1m_out": 0.3,
1618      "cost_per_1m_in_cached": 0,
1619      "cost_per_1m_out_cached": 0,
1620      "context_window": 40960,
1621      "default_max_tokens": 8000,
1622      "can_reason": true,
1623      "reasoning_levels": [
1624        "low",
1625        "medium",
1626        "high"
1627      ],
1628      "default_reasoning_effort": "medium",
1629      "supports_attachments": false,
1630      "options": {}
1631    },
1632    {
1633      "id": "alibaba/qwen3-235b-a22b-thinking",
1634      "name": "Qwen3 235B A22B Thinking 2507",
1635      "cost_per_1m_in": 0.3,
1636      "cost_per_1m_out": 2.9,
1637      "cost_per_1m_in_cached": 0,
1638      "cost_per_1m_out_cached": 0,
1639      "context_window": 262114,
1640      "default_max_tokens": 8000,
1641      "can_reason": true,
1642      "reasoning_levels": [
1643        "low",
1644        "medium",
1645        "high"
1646      ],
1647      "default_reasoning_effort": "medium",
1648      "supports_attachments": true,
1649      "options": {}
1650    },
1651    {
1652      "id": "alibaba/qwen-3-235b",
1653      "name": "Qwen3 235B A22b Instruct 2507",
1654      "cost_per_1m_in": 0.071,
1655      "cost_per_1m_out": 0.463,
1656      "cost_per_1m_in_cached": 0,
1657      "cost_per_1m_out_cached": 0,
1658      "context_window": 40960,
1659      "default_max_tokens": 8000,
1660      "can_reason": false,
1661      "supports_attachments": false,
1662      "options": {}
1663    },
1664    {
1665      "id": "alibaba/qwen3-coder",
1666      "name": "Qwen3 Coder 480B A35B Instruct",
1667      "cost_per_1m_in": 0.38,
1668      "cost_per_1m_out": 1.53,
1669      "cost_per_1m_in_cached": 0,
1670      "cost_per_1m_out_cached": 0,
1671      "context_window": 262144,
1672      "default_max_tokens": 8000,
1673      "can_reason": false,
1674      "supports_attachments": false,
1675      "options": {}
1676    },
1677    {
1678      "id": "alibaba/qwen3-coder-plus",
1679      "name": "Qwen3 Coder Plus",
1680      "cost_per_1m_in": 1,
1681      "cost_per_1m_out": 5,
1682      "cost_per_1m_in_cached": 0.2,
1683      "cost_per_1m_out_cached": 0,
1684      "context_window": 1000000,
1685      "default_max_tokens": 8000,
1686      "can_reason": false,
1687      "supports_attachments": false,
1688      "options": {}
1689    },
1690    {
1691      "id": "alibaba/qwen3-max",
1692      "name": "Qwen3 Max",
1693      "cost_per_1m_in": 1.2,
1694      "cost_per_1m_out": 6,
1695      "cost_per_1m_in_cached": 0.24,
1696      "cost_per_1m_out_cached": 0,
1697      "context_window": 262144,
1698      "default_max_tokens": 8000,
1699      "can_reason": false,
1700      "supports_attachments": false,
1701      "options": {}
1702    },
1703    {
1704      "id": "alibaba/qwen3-max-preview",
1705      "name": "Qwen3 Max Preview",
1706      "cost_per_1m_in": 1.2,
1707      "cost_per_1m_out": 6,
1708      "cost_per_1m_in_cached": 0.24,
1709      "cost_per_1m_out_cached": 0,
1710      "context_window": 262144,
1711      "default_max_tokens": 8000,
1712      "can_reason": false,
1713      "supports_attachments": false,
1714      "options": {}
1715    },
1716    {
1717      "id": "alibaba/qwen-3-14b",
1718      "name": "Qwen3-14B",
1719      "cost_per_1m_in": 0.06,
1720      "cost_per_1m_out": 0.24,
1721      "cost_per_1m_in_cached": 0,
1722      "cost_per_1m_out_cached": 0,
1723      "context_window": 40960,
1724      "default_max_tokens": 8000,
1725      "can_reason": true,
1726      "reasoning_levels": [
1727        "low",
1728        "medium",
1729        "high"
1730      ],
1731      "default_reasoning_effort": "medium",
1732      "supports_attachments": false,
1733      "options": {}
1734    },
1735    {
1736      "id": "alibaba/qwen-3-30b",
1737      "name": "Qwen3-30B-A3B",
1738      "cost_per_1m_in": 0.08,
1739      "cost_per_1m_out": 0.29,
1740      "cost_per_1m_in_cached": 0,
1741      "cost_per_1m_out_cached": 0,
1742      "context_window": 40960,
1743      "default_max_tokens": 8000,
1744      "can_reason": true,
1745      "reasoning_levels": [
1746        "low",
1747        "medium",
1748        "high"
1749      ],
1750      "default_reasoning_effort": "medium",
1751      "supports_attachments": false,
1752      "options": {}
1753    },
1754    {
1755      "id": "bytedance/seed-1.6",
1756      "name": "Seed 1.6",
1757      "cost_per_1m_in": 0.25,
1758      "cost_per_1m_out": 2,
1759      "cost_per_1m_in_cached": 0.05,
1760      "cost_per_1m_out_cached": 0,
1761      "context_window": 256000,
1762      "default_max_tokens": 8000,
1763      "can_reason": true,
1764      "reasoning_levels": [
1765        "low",
1766        "medium",
1767        "high"
1768      ],
1769      "default_reasoning_effort": "medium",
1770      "supports_attachments": false,
1771      "options": {}
1772    },
1773    {
1774      "id": "perplexity/sonar",
1775      "name": "Sonar",
1776      "cost_per_1m_in": 1,
1777      "cost_per_1m_out": 1,
1778      "cost_per_1m_in_cached": 0,
1779      "cost_per_1m_out_cached": 0,
1780      "context_window": 127000,
1781      "default_max_tokens": 8000,
1782      "can_reason": false,
1783      "supports_attachments": true,
1784      "options": {}
1785    },
1786    {
1787      "id": "perplexity/sonar-pro",
1788      "name": "Sonar Pro",
1789      "cost_per_1m_in": 3,
1790      "cost_per_1m_out": 15,
1791      "cost_per_1m_in_cached": 0,
1792      "cost_per_1m_out_cached": 0,
1793      "context_window": 200000,
1794      "default_max_tokens": 8000,
1795      "can_reason": false,
1796      "supports_attachments": true,
1797      "options": {}
1798    },
1799    {
1800      "id": "stealth/sonoma-dusk-alpha",
1801      "name": "Sonoma Dusk Alpha",
1802      "cost_per_1m_in": 0.2,
1803      "cost_per_1m_out": 0.5,
1804      "cost_per_1m_in_cached": 0.05,
1805      "cost_per_1m_out_cached": 0,
1806      "context_window": 2000000,
1807      "default_max_tokens": 8000,
1808      "can_reason": false,
1809      "supports_attachments": true,
1810      "options": {}
1811    },
1812    {
1813      "id": "stealth/sonoma-sky-alpha",
1814      "name": "Sonoma Sky Alpha",
1815      "cost_per_1m_in": 0.2,
1816      "cost_per_1m_out": 0.5,
1817      "cost_per_1m_in_cached": 0.05,
1818      "cost_per_1m_out_cached": 0,
1819      "context_window": 2000000,
1820      "default_max_tokens": 8000,
1821      "can_reason": false,
1822      "supports_attachments": true,
1823      "options": {}
1824    },
1825    {
1826      "id": "openai/gpt-oss-120b",
1827      "name": "gpt-oss-120b",
1828      "cost_per_1m_in": 0.1,
1829      "cost_per_1m_out": 0.5,
1830      "cost_per_1m_in_cached": 0,
1831      "cost_per_1m_out_cached": 0,
1832      "context_window": 131072,
1833      "default_max_tokens": 8000,
1834      "can_reason": true,
1835      "reasoning_levels": [
1836        "low",
1837        "medium",
1838        "high"
1839      ],
1840      "default_reasoning_effort": "medium",
1841      "supports_attachments": false,
1842      "options": {}
1843    },
1844    {
1845      "id": "openai/gpt-oss-20b",
1846      "name": "gpt-oss-20b",
1847      "cost_per_1m_in": 0.07,
1848      "cost_per_1m_out": 0.3,
1849      "cost_per_1m_in_cached": 0,
1850      "cost_per_1m_out_cached": 0,
1851      "context_window": 128000,
1852      "default_max_tokens": 8000,
1853      "can_reason": true,
1854      "reasoning_levels": [
1855        "low",
1856        "medium",
1857        "high"
1858      ],
1859      "default_reasoning_effort": "medium",
1860      "supports_attachments": false,
1861      "options": {}
1862    },
1863    {
1864      "id": "openai/gpt-oss-safeguard-20b",
1865      "name": "gpt-oss-safeguard-20b",
1866      "cost_per_1m_in": 0.075,
1867      "cost_per_1m_out": 0.3,
1868      "cost_per_1m_in_cached": 0.037,
1869      "cost_per_1m_out_cached": 0,
1870      "context_window": 131072,
1871      "default_max_tokens": 8000,
1872      "can_reason": true,
1873      "reasoning_levels": [
1874        "low",
1875        "medium",
1876        "high"
1877      ],
1878      "default_reasoning_effort": "medium",
1879      "supports_attachments": false,
1880      "options": {}
1881    },
1882    {
1883      "id": "openai/o1",
1884      "name": "o1",
1885      "cost_per_1m_in": 15,
1886      "cost_per_1m_out": 60,
1887      "cost_per_1m_in_cached": 7.5,
1888      "cost_per_1m_out_cached": 0,
1889      "context_window": 200000,
1890      "default_max_tokens": 8000,
1891      "can_reason": true,
1892      "reasoning_levels": [
1893        "low",
1894        "medium",
1895        "high"
1896      ],
1897      "default_reasoning_effort": "medium",
1898      "supports_attachments": true,
1899      "options": {}
1900    },
1901    {
1902      "id": "openai/o3",
1903      "name": "o3",
1904      "cost_per_1m_in": 2,
1905      "cost_per_1m_out": 8,
1906      "cost_per_1m_in_cached": 0.5,
1907      "cost_per_1m_out_cached": 0,
1908      "context_window": 200000,
1909      "default_max_tokens": 8000,
1910      "can_reason": true,
1911      "reasoning_levels": [
1912        "low",
1913        "medium",
1914        "high"
1915      ],
1916      "default_reasoning_effort": "medium",
1917      "supports_attachments": true,
1918      "options": {}
1919    },
1920    {
1921      "id": "openai/o3-pro",
1922      "name": "o3 Pro",
1923      "cost_per_1m_in": 20,
1924      "cost_per_1m_out": 80,
1925      "cost_per_1m_in_cached": 0,
1926      "cost_per_1m_out_cached": 0,
1927      "context_window": 200000,
1928      "default_max_tokens": 8000,
1929      "can_reason": true,
1930      "reasoning_levels": [
1931        "low",
1932        "medium",
1933        "high"
1934      ],
1935      "default_reasoning_effort": "medium",
1936      "supports_attachments": true,
1937      "options": {}
1938    },
1939    {
1940      "id": "openai/o3-deep-research",
1941      "name": "o3-deep-research",
1942      "cost_per_1m_in": 10,
1943      "cost_per_1m_out": 40,
1944      "cost_per_1m_in_cached": 2.5,
1945      "cost_per_1m_out_cached": 0,
1946      "context_window": 200000,
1947      "default_max_tokens": 8000,
1948      "can_reason": true,
1949      "reasoning_levels": [
1950        "low",
1951        "medium",
1952        "high"
1953      ],
1954      "default_reasoning_effort": "medium",
1955      "supports_attachments": true,
1956      "options": {}
1957    },
1958    {
1959      "id": "openai/o3-mini",
1960      "name": "o3-mini",
1961      "cost_per_1m_in": 1.1,
1962      "cost_per_1m_out": 4.4,
1963      "cost_per_1m_in_cached": 0.55,
1964      "cost_per_1m_out_cached": 0,
1965      "context_window": 200000,
1966      "default_max_tokens": 8000,
1967      "can_reason": true,
1968      "reasoning_levels": [
1969        "low",
1970        "medium",
1971        "high"
1972      ],
1973      "default_reasoning_effort": "medium",
1974      "supports_attachments": false,
1975      "options": {}
1976    },
1977    {
1978      "id": "openai/o4-mini",
1979      "name": "o4-mini",
1980      "cost_per_1m_in": 1.1,
1981      "cost_per_1m_out": 4.4,
1982      "cost_per_1m_in_cached": 0.275,
1983      "cost_per_1m_out_cached": 0,
1984      "context_window": 200000,
1985      "default_max_tokens": 8000,
1986      "can_reason": true,
1987      "reasoning_levels": [
1988        "low",
1989        "medium",
1990        "high"
1991      ],
1992      "default_reasoning_effort": "medium",
1993      "supports_attachments": true,
1994      "options": {}
1995    },
1996    {
1997      "id": "vercel/v0-1.0-md",
1998      "name": "v0-1.0-md",
1999      "cost_per_1m_in": 3,
2000      "cost_per_1m_out": 15,
2001      "cost_per_1m_in_cached": 0,
2002      "cost_per_1m_out_cached": 0,
2003      "context_window": 128000,
2004      "default_max_tokens": 8000,
2005      "can_reason": false,
2006      "supports_attachments": true,
2007      "options": {}
2008    },
2009    {
2010      "id": "vercel/v0-1.5-md",
2011      "name": "v0-1.5-md",
2012      "cost_per_1m_in": 3,
2013      "cost_per_1m_out": 15,
2014      "cost_per_1m_in_cached": 0,
2015      "cost_per_1m_out_cached": 0,
2016      "context_window": 128000,
2017      "default_max_tokens": 8000,
2018      "can_reason": false,
2019      "supports_attachments": true,
2020      "options": {}
2021    }
2022  ],
2023  "default_headers": {
2024    "HTTP-Referer": "https://charm.land",
2025    "X-Title": "Crush"
2026  }
2027}
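
Usage note (not part of the config file): the provider is declared as "openai-compat", so a client is assumed to reach it by POSTing an OpenAI-style chat completion request to the configured api_endpoint, authorized with the key taken from $VERCEL_API_KEY. The TypeScript sketch below illustrates that assumption; the /chat/completions path and the request/response shape come from the OpenAI convention implied by the type field rather than from anything stated in this file, while the endpoint, headers, model id, and max_tokens value are copied from the entries above.

// Minimal sketch, assuming the gateway speaks the OpenAI Chat Completions protocol
// (implied by "type": "openai-compat"). Endpoint, key variable, headers, and model id
// are taken from the config above; everything else is illustrative.
const endpoint = "https://ai-gateway.vercel.sh/v1/chat/completions";

async function complete(prompt: string): Promise<string> {
  const res = await fetch(endpoint, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${process.env.VERCEL_API_KEY}`,
      // Mirrors the provider's default_headers.
      "HTTP-Referer": "https://charm.land",
      "X-Title": "Crush",
    },
    body: JSON.stringify({
      model: "anthropic/claude-sonnet-4",            // default_large_model_id
      max_tokens: 8000,                              // that model's default_max_tokens
      messages: [{ role: "user", content: prompt }],
    }),
  });
  if (!res.ok) throw new Error(`gateway request failed: ${res.status}`);
  const data = await res.json();
  return data.choices[0].message.content;
}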