vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "openai-compat",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true,
  21      "options": {}
  22    },
  23    {
  24      "id": "anthropic/claude-3.5-haiku",
  25      "name": "Claude 3.5 Haiku",
  26      "cost_per_1m_in": 0.8,
  27      "cost_per_1m_out": 4,
  28      "cost_per_1m_in_cached": 0.08,
  29      "cost_per_1m_out_cached": 1,
  30      "context_window": 200000,
  31      "default_max_tokens": 8000,
  32      "can_reason": false,
  33      "supports_attachments": true,
  34      "options": {}
  35    },
  36    {
  37      "id": "anthropic/claude-3.5-sonnet",
  38      "name": "Claude 3.5 Sonnet",
  39      "cost_per_1m_in": 3,
  40      "cost_per_1m_out": 15,
  41      "cost_per_1m_in_cached": 0.3,
  42      "cost_per_1m_out_cached": 3.75,
  43      "context_window": 200000,
  44      "default_max_tokens": 8000,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "anthropic/claude-3.5-sonnet-20240620",
  51      "name": "Claude 3.5 Sonnet (2024-06-20)",
  52      "cost_per_1m_in": 3,
  53      "cost_per_1m_out": 15,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 200000,
  57      "default_max_tokens": 8000,
  58      "can_reason": false,
  59      "supports_attachments": true,
  60      "options": {}
  61    },
  62    {
  63      "id": "anthropic/claude-3.7-sonnet",
  64      "name": "Claude 3.7 Sonnet",
  65      "cost_per_1m_in": 3,
  66      "cost_per_1m_out": 15,
  67      "cost_per_1m_in_cached": 0.3,
  68      "cost_per_1m_out_cached": 3.75,
  69      "context_window": 200000,
  70      "default_max_tokens": 8000,
  71      "can_reason": true,
  72      "reasoning_levels": [
  73        "low",
  74        "medium",
  75        "high"
  76      ],
  77      "default_reasoning_effort": "medium",
  78      "supports_attachments": true,
  79      "options": {}
  80    },
  81    {
  82      "id": "anthropic/claude-haiku-4.5",
  83      "name": "Claude Haiku 4.5",
  84      "cost_per_1m_in": 1,
  85      "cost_per_1m_out": 5,
  86      "cost_per_1m_in_cached": 0.1,
  87      "cost_per_1m_out_cached": 1.25,
  88      "context_window": 200000,
  89      "default_max_tokens": 8000,
  90      "can_reason": true,
  91      "reasoning_levels": [
  92        "low",
  93        "medium",
  94        "high"
  95      ],
  96      "default_reasoning_effort": "medium",
  97      "supports_attachments": true,
  98      "options": {}
  99    },
 100    {
 101      "id": "anthropic/claude-opus-4",
 102      "name": "Claude Opus 4",
 103      "cost_per_1m_in": 15,
 104      "cost_per_1m_out": 75,
 105      "cost_per_1m_in_cached": 1.5,
 106      "cost_per_1m_out_cached": 18.75,
 107      "context_window": 200000,
 108      "default_max_tokens": 8000,
 109      "can_reason": true,
 110      "reasoning_levels": [
 111        "low",
 112        "medium",
 113        "high"
 114      ],
 115      "default_reasoning_effort": "medium",
 116      "supports_attachments": true,
 117      "options": {}
 118    },
 119    {
 120      "id": "anthropic/claude-opus-4.1",
 121      "name": "Claude Opus 4.1",
 122      "cost_per_1m_in": 15,
 123      "cost_per_1m_out": 75,
 124      "cost_per_1m_in_cached": 1.5,
 125      "cost_per_1m_out_cached": 18.75,
 126      "context_window": 200000,
 127      "default_max_tokens": 8000,
 128      "can_reason": true,
 129      "reasoning_levels": [
 130        "low",
 131        "medium",
 132        "high"
 133      ],
 134      "default_reasoning_effort": "medium",
 135      "supports_attachments": true,
 136      "options": {}
 137    },
 138    {
 139      "id": "anthropic/claude-opus-4.5",
 140      "name": "Claude Opus 4.5",
 141      "cost_per_1m_in": 5,
 142      "cost_per_1m_out": 25,
 143      "cost_per_1m_in_cached": 0.5,
 144      "cost_per_1m_out_cached": 6.25,
 145      "context_window": 200000,
 146      "default_max_tokens": 8000,
 147      "can_reason": true,
 148      "reasoning_levels": [
 149        "low",
 150        "medium",
 151        "high"
 152      ],
 153      "default_reasoning_effort": "medium",
 154      "supports_attachments": true,
 155      "options": {}
 156    },
 157    {
 158      "id": "anthropic/claude-sonnet-4",
 159      "name": "Claude Sonnet 4",
 160      "cost_per_1m_in": 3,
 161      "cost_per_1m_out": 15,
 162      "cost_per_1m_in_cached": 0.3,
 163      "cost_per_1m_out_cached": 3.75,
 164      "context_window": 1000000,
 165      "default_max_tokens": 8000,
 166      "can_reason": true,
 167      "reasoning_levels": [
 168        "low",
 169        "medium",
 170        "high"
 171      ],
 172      "default_reasoning_effort": "medium",
 173      "supports_attachments": true,
 174      "options": {}
 175    },
 176    {
 177      "id": "anthropic/claude-sonnet-4.5",
 178      "name": "Claude Sonnet 4.5",
 179      "cost_per_1m_in": 3,
 180      "cost_per_1m_out": 15,
 181      "cost_per_1m_in_cached": 0.3,
 182      "cost_per_1m_out_cached": 3.75,
 183      "context_window": 1000000,
 184      "default_max_tokens": 8000,
 185      "can_reason": true,
 186      "reasoning_levels": [
 187        "low",
 188        "medium",
 189        "high"
 190      ],
 191      "default_reasoning_effort": "medium",
 192      "supports_attachments": true,
 193      "options": {}
 194    },
 195    {
 196      "id": "openai/codex-mini",
 197      "name": "Codex Mini",
 198      "cost_per_1m_in": 1.5,
 199      "cost_per_1m_out": 6,
 200      "cost_per_1m_in_cached": 0.375,
 201      "cost_per_1m_out_cached": 0,
 202      "context_window": 200000,
 203      "default_max_tokens": 8000,
 204      "can_reason": true,
 205      "reasoning_levels": [
 206        "low",
 207        "medium",
 208        "high"
 209      ],
 210      "default_reasoning_effort": "medium",
 211      "supports_attachments": true,
 212      "options": {}
 213    },
 214    {
 215      "id": "cohere/command-a",
 216      "name": "Command A",
 217      "cost_per_1m_in": 2.5,
 218      "cost_per_1m_out": 10,
 219      "cost_per_1m_in_cached": 0,
 220      "cost_per_1m_out_cached": 0,
 221      "context_window": 256000,
 222      "default_max_tokens": 8000,
 223      "can_reason": false,
 224      "supports_attachments": false,
 225      "options": {}
 226    },
 227    {
 228      "id": "deepseek/deepseek-v3",
 229      "name": "DeepSeek V3 0324",
 230      "cost_per_1m_in": 0.77,
 231      "cost_per_1m_out": 0.77,
 232      "cost_per_1m_in_cached": 0,
 233      "cost_per_1m_out_cached": 0,
 234      "context_window": 163840,
 235      "default_max_tokens": 8000,
 236      "can_reason": false,
 237      "supports_attachments": false,
 238      "options": {}
 239    },
 240    {
 241      "id": "deepseek/deepseek-v3.1-terminus",
 242      "name": "DeepSeek V3.1 Terminus",
 243      "cost_per_1m_in": 0.27,
 244      "cost_per_1m_out": 1,
 245      "cost_per_1m_in_cached": 0,
 246      "cost_per_1m_out_cached": 0,
 247      "context_window": 131072,
 248      "default_max_tokens": 8000,
 249      "can_reason": true,
 250      "reasoning_levels": [
 251        "low",
 252        "medium",
 253        "high"
 254      ],
 255      "default_reasoning_effort": "medium",
 256      "supports_attachments": false,
 257      "options": {}
 258    },
 259    {
 260      "id": "deepseek/deepseek-v3.2-exp",
 261      "name": "DeepSeek V3.2 Exp",
 262      "cost_per_1m_in": 0.27,
 263      "cost_per_1m_out": 0.4,
 264      "cost_per_1m_in_cached": 0,
 265      "cost_per_1m_out_cached": 0,
 266      "context_window": 163840,
 267      "default_max_tokens": 8000,
 268      "can_reason": true,
 269      "reasoning_levels": [
 270        "low",
 271        "medium",
 272        "high"
 273      ],
 274      "default_reasoning_effort": "medium",
 275      "supports_attachments": false,
 276      "options": {}
 277    },
 278    {
 279      "id": "deepseek/deepseek-v3.2-thinking",
 280      "name": "DeepSeek V3.2 Thinking",
 281      "cost_per_1m_in": 0.28,
 282      "cost_per_1m_out": 0.42,
 283      "cost_per_1m_in_cached": 0.028,
 284      "cost_per_1m_out_cached": 0,
 285      "context_window": 128000,
 286      "default_max_tokens": 8000,
 287      "can_reason": true,
 288      "reasoning_levels": [
 289        "low",
 290        "medium",
 291        "high"
 292      ],
 293      "default_reasoning_effort": "medium",
 294      "supports_attachments": false,
 295      "options": {}
 296    },
 297    {
 298      "id": "deepseek/deepseek-v3.1",
 299      "name": "DeepSeek-V3.1",
 300      "cost_per_1m_in": 0.3,
 301      "cost_per_1m_out": 1,
 302      "cost_per_1m_in_cached": 0,
 303      "cost_per_1m_out_cached": 0,
 304      "context_window": 163840,
 305      "default_max_tokens": 8000,
 306      "can_reason": true,
 307      "reasoning_levels": [
 308        "low",
 309        "medium",
 310        "high"
 311      ],
 312      "default_reasoning_effort": "medium",
 313      "supports_attachments": false,
 314      "options": {}
 315    },
 316    {
 317      "id": "mistral/devstral-2",
 318      "name": "Devstral 2",
 319      "cost_per_1m_in": 0,
 320      "cost_per_1m_out": 0,
 321      "cost_per_1m_in_cached": 0,
 322      "cost_per_1m_out_cached": 0,
 323      "context_window": 256000,
 324      "default_max_tokens": 8000,
 325      "can_reason": false,
 326      "supports_attachments": false,
 327      "options": {}
 328    },
 329    {
 330      "id": "mistral/devstral-small",
 331      "name": "Devstral Small 1.1",
 332      "cost_per_1m_in": 0.1,
 333      "cost_per_1m_out": 0.3,
 334      "cost_per_1m_in_cached": 0,
 335      "cost_per_1m_out_cached": 0,
 336      "context_window": 128000,
 337      "default_max_tokens": 8000,
 338      "can_reason": false,
 339      "supports_attachments": false,
 340      "options": {}
 341    },
 342    {
 343      "id": "mistral/devstral-small-2",
 344      "name": "Devstral Small 2",
 345      "cost_per_1m_in": 0,
 346      "cost_per_1m_out": 0,
 347      "cost_per_1m_in_cached": 0,
 348      "cost_per_1m_out_cached": 0,
 349      "context_window": 256000,
 350      "default_max_tokens": 8000,
 351      "can_reason": false,
 352      "supports_attachments": false,
 353      "options": {}
 354    },
 355    {
 356      "id": "zai/glm-4.5-air",
 357      "name": "GLM 4.5 Air",
 358      "cost_per_1m_in": 0.2,
 359      "cost_per_1m_out": 1.1,
 360      "cost_per_1m_in_cached": 0.03,
 361      "cost_per_1m_out_cached": 0,
 362      "context_window": 128000,
 363      "default_max_tokens": 8000,
 364      "can_reason": true,
 365      "reasoning_levels": [
 366        "low",
 367        "medium",
 368        "high"
 369      ],
 370      "default_reasoning_effort": "medium",
 371      "supports_attachments": false,
 372      "options": {}
 373    },
 374    {
 375      "id": "zai/glm-4.5v",
 376      "name": "GLM 4.5V",
 377      "cost_per_1m_in": 0.6,
 378      "cost_per_1m_out": 1.8,
 379      "cost_per_1m_in_cached": 0,
 380      "cost_per_1m_out_cached": 0,
 381      "context_window": 65536,
 382      "default_max_tokens": 8000,
 383      "can_reason": true,
 384      "reasoning_levels": [
 385        "low",
 386        "medium",
 387        "high"
 388      ],
 389      "default_reasoning_effort": "medium",
 390      "supports_attachments": true,
 391      "options": {}
 392    },
 393    {
 394      "id": "zai/glm-4.6",
 395      "name": "GLM 4.6",
 396      "cost_per_1m_in": 0.45,
 397      "cost_per_1m_out": 1.8,
 398      "cost_per_1m_in_cached": 0.11,
 399      "cost_per_1m_out_cached": 0,
 400      "context_window": 200000,
 401      "default_max_tokens": 8000,
 402      "can_reason": true,
 403      "reasoning_levels": [
 404        "low",
 405        "medium",
 406        "high"
 407      ],
 408      "default_reasoning_effort": "medium",
 409      "supports_attachments": false,
 410      "options": {}
 411    },
 412    {
 413      "id": "zai/glm-4.7",
 414      "name": "GLM 4.7",
 415      "cost_per_1m_in": 0.43,
 416      "cost_per_1m_out": 1.75,
 417      "cost_per_1m_in_cached": 0.08,
 418      "cost_per_1m_out_cached": 0,
 419      "context_window": 202752,
 420      "default_max_tokens": 8000,
 421      "can_reason": true,
 422      "reasoning_levels": [
 423        "low",
 424        "medium",
 425        "high"
 426      ],
 427      "default_reasoning_effort": "medium",
 428      "supports_attachments": false,
 429      "options": {}
 430    },
 431    {
 432      "id": "zai/glm-4.7-flashx",
 433      "name": "GLM 4.7 FlashX",
 434      "cost_per_1m_in": 0.06,
 435      "cost_per_1m_out": 0.4,
 436      "cost_per_1m_in_cached": 0.01,
 437      "cost_per_1m_out_cached": 0,
 438      "context_window": 200000,
 439      "default_max_tokens": 8000,
 440      "can_reason": true,
 441      "reasoning_levels": [
 442        "low",
 443        "medium",
 444        "high"
 445      ],
 446      "default_reasoning_effort": "medium",
 447      "supports_attachments": false,
 448      "options": {}
 449    },
 450    {
 451      "id": "zai/glm-4.5",
 452      "name": "GLM-4.5",
 453      "cost_per_1m_in": 0.6,
 454      "cost_per_1m_out": 2.2,
 455      "cost_per_1m_in_cached": 0,
 456      "cost_per_1m_out_cached": 0,
 457      "context_window": 131072,
 458      "default_max_tokens": 8000,
 459      "can_reason": true,
 460      "reasoning_levels": [
 461        "low",
 462        "medium",
 463        "high"
 464      ],
 465      "default_reasoning_effort": "medium",
 466      "supports_attachments": false,
 467      "options": {}
 468    },
 469    {
 470      "id": "zai/glm-4.6v",
 471      "name": "GLM-4.6V",
 472      "cost_per_1m_in": 0.3,
 473      "cost_per_1m_out": 0.9,
 474      "cost_per_1m_in_cached": 0.05,
 475      "cost_per_1m_out_cached": 0,
 476      "context_window": 128000,
 477      "default_max_tokens": 8000,
 478      "can_reason": true,
 479      "reasoning_levels": [
 480        "low",
 481        "medium",
 482        "high"
 483      ],
 484      "default_reasoning_effort": "medium",
 485      "supports_attachments": true,
 486      "options": {}
 487    },
 488    {
 489      "id": "zai/glm-4.6v-flash",
 490      "name": "GLM-4.6V-Flash",
 491      "cost_per_1m_in": 0,
 492      "cost_per_1m_out": 0,
 493      "cost_per_1m_in_cached": 0,
 494      "cost_per_1m_out_cached": 0,
 495      "context_window": 128000,
 496      "default_max_tokens": 8000,
 497      "can_reason": true,
 498      "reasoning_levels": [
 499        "low",
 500        "medium",
 501        "high"
 502      ],
 503      "default_reasoning_effort": "medium",
 504      "supports_attachments": true,
 505      "options": {}
 506    },
 507    {
 508      "id": "openai/gpt-5.1-codex-max",
 509      "name": "GPT 5.1 Codex Max",
 510      "cost_per_1m_in": 1.25,
 511      "cost_per_1m_out": 10,
 512      "cost_per_1m_in_cached": 0.125,
 513      "cost_per_1m_out_cached": 0,
 514      "context_window": 400000,
 515      "default_max_tokens": 8000,
 516      "can_reason": true,
 517      "reasoning_levels": [
 518        "low",
 519        "medium",
 520        "high"
 521      ],
 522      "default_reasoning_effort": "medium",
 523      "supports_attachments": true,
 524      "options": {}
 525    },
 526    {
 527      "id": "openai/gpt-5.1-thinking",
 528      "name": "GPT 5.1 Thinking",
 529      "cost_per_1m_in": 1.25,
 530      "cost_per_1m_out": 10,
 531      "cost_per_1m_in_cached": 0.13,
 532      "cost_per_1m_out_cached": 0,
 533      "context_window": 400000,
 534      "default_max_tokens": 8000,
 535      "can_reason": true,
 536      "reasoning_levels": [
 537        "low",
 538        "medium",
 539        "high"
 540      ],
 541      "default_reasoning_effort": "medium",
 542      "supports_attachments": true,
 543      "options": {}
 544    },
 545    {
 546      "id": "openai/gpt-5.2",
 547      "name": "GPT 5.2",
 548      "cost_per_1m_in": 1.75,
 549      "cost_per_1m_out": 14,
 550      "cost_per_1m_in_cached": 0.18,
 551      "cost_per_1m_out_cached": 0,
 552      "context_window": 400000,
 553      "default_max_tokens": 8000,
 554      "can_reason": true,
 555      "reasoning_levels": [
 556        "low",
 557        "medium",
 558        "high"
 559      ],
 560      "default_reasoning_effort": "medium",
 561      "supports_attachments": true,
 562      "options": {}
 563    },
 564    {
 565      "id": "openai/gpt-5.2-pro",
 566      "name": "GPT 5.2 Pro",
 567      "cost_per_1m_in": 21,
 568      "cost_per_1m_out": 168,
 569      "cost_per_1m_in_cached": 0,
 570      "cost_per_1m_out_cached": 0,
 571      "context_window": 400000,
 572      "default_max_tokens": 8000,
 573      "can_reason": true,
 574      "reasoning_levels": [
 575        "low",
 576        "medium",
 577        "high"
 578      ],
 579      "default_reasoning_effort": "medium",
 580      "supports_attachments": true,
 581      "options": {}
 582    },
 583    {
 584      "id": "openai/gpt-4-turbo",
 585      "name": "GPT-4 Turbo",
 586      "cost_per_1m_in": 10,
 587      "cost_per_1m_out": 30,
 588      "cost_per_1m_in_cached": 0,
 589      "cost_per_1m_out_cached": 0,
 590      "context_window": 128000,
 591      "default_max_tokens": 4096,
 592      "can_reason": false,
 593      "supports_attachments": true,
 594      "options": {}
 595    },
 596    {
 597      "id": "openai/gpt-4.1",
 598      "name": "GPT-4.1",
 599      "cost_per_1m_in": 2,
 600      "cost_per_1m_out": 8,
 601      "cost_per_1m_in_cached": 0.5,
 602      "cost_per_1m_out_cached": 0,
 603      "context_window": 1047576,
 604      "default_max_tokens": 8000,
 605      "can_reason": false,
 606      "supports_attachments": true,
 607      "options": {}
 608    },
 609    {
 610      "id": "openai/gpt-4.1-mini",
 611      "name": "GPT-4.1 mini",
 612      "cost_per_1m_in": 0.4,
 613      "cost_per_1m_out": 1.6,
 614      "cost_per_1m_in_cached": 0.1,
 615      "cost_per_1m_out_cached": 0,
 616      "context_window": 1047576,
 617      "default_max_tokens": 8000,
 618      "can_reason": false,
 619      "supports_attachments": true,
 620      "options": {}
 621    },
 622    {
 623      "id": "openai/gpt-4.1-nano",
 624      "name": "GPT-4.1 nano",
 625      "cost_per_1m_in": 0.1,
 626      "cost_per_1m_out": 0.4,
 627      "cost_per_1m_in_cached": 0.03,
 628      "cost_per_1m_out_cached": 0,
 629      "context_window": 1047576,
 630      "default_max_tokens": 8000,
 631      "can_reason": false,
 632      "supports_attachments": true,
 633      "options": {}
 634    },
 635    {
 636      "id": "openai/gpt-4o",
 637      "name": "GPT-4o",
 638      "cost_per_1m_in": 2.5,
 639      "cost_per_1m_out": 10,
 640      "cost_per_1m_in_cached": 1.25,
 641      "cost_per_1m_out_cached": 0,
 642      "context_window": 128000,
 643      "default_max_tokens": 8000,
 644      "can_reason": false,
 645      "supports_attachments": true,
 646      "options": {}
 647    },
 648    {
 649      "id": "openai/gpt-4o-mini",
 650      "name": "GPT-4o mini",
 651      "cost_per_1m_in": 0.15,
 652      "cost_per_1m_out": 0.6,
 653      "cost_per_1m_in_cached": 0.075,
 654      "cost_per_1m_out_cached": 0,
 655      "context_window": 128000,
 656      "default_max_tokens": 8000,
 657      "can_reason": false,
 658      "supports_attachments": true,
 659      "options": {}
 660    },
 661    {
 662      "id": "openai/gpt-5",
 663      "name": "GPT-5",
 664      "cost_per_1m_in": 1.25,
 665      "cost_per_1m_out": 10,
 666      "cost_per_1m_in_cached": 0.13,
 667      "cost_per_1m_out_cached": 0,
 668      "context_window": 400000,
 669      "default_max_tokens": 8000,
 670      "can_reason": true,
 671      "reasoning_levels": [
 672        "low",
 673        "medium",
 674        "high"
 675      ],
 676      "default_reasoning_effort": "medium",
 677      "supports_attachments": true,
 678      "options": {}
 679    },
 680    {
 681      "id": "openai/gpt-5-chat",
 682      "name": "GPT-5 Chat",
 683      "cost_per_1m_in": 1.25,
 684      "cost_per_1m_out": 10,
 685      "cost_per_1m_in_cached": 0.125,
 686      "cost_per_1m_out_cached": 0,
 687      "context_window": 128000,
 688      "default_max_tokens": 8000,
 689      "can_reason": true,
 690      "reasoning_levels": [
 691        "low",
 692        "medium",
 693        "high"
 694      ],
 695      "default_reasoning_effort": "medium",
 696      "supports_attachments": true,
 697      "options": {}
 698    },
 699    {
 700      "id": "openai/gpt-5-mini",
 701      "name": "GPT-5 mini",
 702      "cost_per_1m_in": 0.25,
 703      "cost_per_1m_out": 2,
 704      "cost_per_1m_in_cached": 0.03,
 705      "cost_per_1m_out_cached": 0,
 706      "context_window": 400000,
 707      "default_max_tokens": 8000,
 708      "can_reason": true,
 709      "reasoning_levels": [
 710        "low",
 711        "medium",
 712        "high"
 713      ],
 714      "default_reasoning_effort": "medium",
 715      "supports_attachments": true,
 716      "options": {}
 717    },
 718    {
 719      "id": "openai/gpt-5-nano",
 720      "name": "GPT-5 nano",
 721      "cost_per_1m_in": 0.05,
 722      "cost_per_1m_out": 0.4,
 723      "cost_per_1m_in_cached": 0.01,
 724      "cost_per_1m_out_cached": 0,
 725      "context_window": 400000,
 726      "default_max_tokens": 8000,
 727      "can_reason": true,
 728      "reasoning_levels": [
 729        "low",
 730        "medium",
 731        "high"
 732      ],
 733      "default_reasoning_effort": "medium",
 734      "supports_attachments": true,
 735      "options": {}
 736    },
 737    {
 738      "id": "openai/gpt-5-pro",
 739      "name": "GPT-5 pro",
 740      "cost_per_1m_in": 15,
 741      "cost_per_1m_out": 120,
 742      "cost_per_1m_in_cached": 0,
 743      "cost_per_1m_out_cached": 0,
 744      "context_window": 400000,
 745      "default_max_tokens": 8000,
 746      "can_reason": true,
 747      "reasoning_levels": [
 748        "low",
 749        "medium",
 750        "high"
 751      ],
 752      "default_reasoning_effort": "medium",
 753      "supports_attachments": true,
 754      "options": {}
 755    },
 756    {
 757      "id": "openai/gpt-5-codex",
 758      "name": "GPT-5-Codex",
 759      "cost_per_1m_in": 1.25,
 760      "cost_per_1m_out": 10,
 761      "cost_per_1m_in_cached": 0.13,
 762      "cost_per_1m_out_cached": 0,
 763      "context_window": 400000,
 764      "default_max_tokens": 8000,
 765      "can_reason": true,
 766      "reasoning_levels": [
 767        "low",
 768        "medium",
 769        "high"
 770      ],
 771      "default_reasoning_effort": "medium",
 772      "supports_attachments": true,
 773      "options": {}
 774    },
 775    {
 776      "id": "openai/gpt-5.1-codex-mini",
 777      "name": "GPT-5.1 Codex mini",
 778      "cost_per_1m_in": 0.25,
 779      "cost_per_1m_out": 2,
 780      "cost_per_1m_in_cached": 0.025,
 781      "cost_per_1m_out_cached": 0,
 782      "context_window": 400000,
 783      "default_max_tokens": 8000,
 784      "can_reason": true,
 785      "reasoning_levels": [
 786        "low",
 787        "medium",
 788        "high"
 789      ],
 790      "default_reasoning_effort": "medium",
 791      "supports_attachments": true,
 792      "options": {}
 793    },
 794    {
 795      "id": "openai/gpt-5.1-instant",
 796      "name": "GPT-5.1 Instant",
 797      "cost_per_1m_in": 1.25,
 798      "cost_per_1m_out": 10,
 799      "cost_per_1m_in_cached": 0.13,
 800      "cost_per_1m_out_cached": 0,
 801      "context_window": 128000,
 802      "default_max_tokens": 8000,
 803      "can_reason": true,
 804      "reasoning_levels": [
 805        "low",
 806        "medium",
 807        "high"
 808      ],
 809      "default_reasoning_effort": "medium",
 810      "supports_attachments": true,
 811      "options": {}
 812    },
 813    {
 814      "id": "openai/gpt-5.1-codex",
 815      "name": "GPT-5.1-Codex",
 816      "cost_per_1m_in": 1.25,
 817      "cost_per_1m_out": 10,
 818      "cost_per_1m_in_cached": 0.125,
 819      "cost_per_1m_out_cached": 0,
 820      "context_window": 400000,
 821      "default_max_tokens": 8000,
 822      "can_reason": true,
 823      "reasoning_levels": [
 824        "low",
 825        "medium",
 826        "high"
 827      ],
 828      "default_reasoning_effort": "medium",
 829      "supports_attachments": true,
 830      "options": {}
 831    },
 832    {
 833      "id": "openai/gpt-5.2-chat",
 834      "name": "GPT-5.2 Chat",
 835      "cost_per_1m_in": 1.75,
 836      "cost_per_1m_out": 14,
 837      "cost_per_1m_in_cached": 0.175,
 838      "cost_per_1m_out_cached": 0,
 839      "context_window": 128000,
 840      "default_max_tokens": 8000,
 841      "can_reason": true,
 842      "reasoning_levels": [
 843        "low",
 844        "medium",
 845        "high"
 846      ],
 847      "default_reasoning_effort": "medium",
 848      "supports_attachments": true,
 849      "options": {}
 850    },
 851    {
 852      "id": "openai/gpt-5.2-codex",
 853      "name": "GPT-5.2-Codex",
 854      "cost_per_1m_in": 1.75,
 855      "cost_per_1m_out": 14,
 856      "cost_per_1m_in_cached": 0.175,
 857      "cost_per_1m_out_cached": 0,
 858      "context_window": 400000,
 859      "default_max_tokens": 8000,
 860      "can_reason": true,
 861      "reasoning_levels": [
 862        "low",
 863        "medium",
 864        "high"
 865      ],
 866      "default_reasoning_effort": "medium",
 867      "supports_attachments": true,
 868      "options": {}
 869    },
 870    {
 871      "id": "google/gemini-2.5-flash",
 872      "name": "Gemini 2.5 Flash",
 873      "cost_per_1m_in": 0.3,
 874      "cost_per_1m_out": 2.5,
 875      "cost_per_1m_in_cached": 0,
 876      "cost_per_1m_out_cached": 0,
 877      "context_window": 1000000,
 878      "default_max_tokens": 8000,
 879      "can_reason": true,
 880      "reasoning_levels": [
 881        "low",
 882        "medium",
 883        "high"
 884      ],
 885      "default_reasoning_effort": "medium",
 886      "supports_attachments": false,
 887      "options": {}
 888    },
 889    {
 890      "id": "google/gemini-2.5-flash-lite",
 891      "name": "Gemini 2.5 Flash Lite",
 892      "cost_per_1m_in": 0.1,
 893      "cost_per_1m_out": 0.4,
 894      "cost_per_1m_in_cached": 0.01,
 895      "cost_per_1m_out_cached": 0,
 896      "context_window": 1048576,
 897      "default_max_tokens": 8000,
 898      "can_reason": true,
 899      "reasoning_levels": [
 900        "low",
 901        "medium",
 902        "high"
 903      ],
 904      "default_reasoning_effort": "medium",
 905      "supports_attachments": true,
 906      "options": {}
 907    },
 908    {
 909      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 910      "name": "Gemini 2.5 Flash Lite Preview 09-2025",
 911      "cost_per_1m_in": 0.1,
 912      "cost_per_1m_out": 0.4,
 913      "cost_per_1m_in_cached": 0.01,
 914      "cost_per_1m_out_cached": 0,
 915      "context_window": 1048576,
 916      "default_max_tokens": 8000,
 917      "can_reason": true,
 918      "reasoning_levels": [
 919        "low",
 920        "medium",
 921        "high"
 922      ],
 923      "default_reasoning_effort": "medium",
 924      "supports_attachments": true,
 925      "options": {}
 926    },
 927    {
 928      "id": "google/gemini-2.5-flash-preview-09-2025",
 929      "name": "Gemini 2.5 Flash Preview 09-2025",
 930      "cost_per_1m_in": 0.3,
 931      "cost_per_1m_out": 2.5,
 932      "cost_per_1m_in_cached": 0.03,
 933      "cost_per_1m_out_cached": 0,
 934      "context_window": 1000000,
 935      "default_max_tokens": 8000,
 936      "can_reason": true,
 937      "reasoning_levels": [
 938        "low",
 939        "medium",
 940        "high"
 941      ],
 942      "default_reasoning_effort": "medium",
 943      "supports_attachments": true,
 944      "options": {}
 945    },
 946    {
 947      "id": "google/gemini-2.5-pro",
 948      "name": "Gemini 2.5 Pro",
 949      "cost_per_1m_in": 1.25,
 950      "cost_per_1m_out": 10,
 951      "cost_per_1m_in_cached": 0,
 952      "cost_per_1m_out_cached": 0,
 953      "context_window": 1048576,
 954      "default_max_tokens": 8000,
 955      "can_reason": true,
 956      "reasoning_levels": [
 957        "low",
 958        "medium",
 959        "high"
 960      ],
 961      "default_reasoning_effort": "medium",
 962      "supports_attachments": false,
 963      "options": {}
 964    },
 965    {
 966      "id": "google/gemini-3-flash",
 967      "name": "Gemini 3 Flash",
 968      "cost_per_1m_in": 0.5,
 969      "cost_per_1m_out": 3,
 970      "cost_per_1m_in_cached": 0.05,
 971      "cost_per_1m_out_cached": 0,
 972      "context_window": 1000000,
 973      "default_max_tokens": 8000,
 974      "can_reason": true,
 975      "reasoning_levels": [
 976        "low",
 977        "medium",
 978        "high"
 979      ],
 980      "default_reasoning_effort": "medium",
 981      "supports_attachments": true,
 982      "options": {}
 983    },
 984    {
 985      "id": "google/gemini-3-pro-preview",
 986      "name": "Gemini 3 Pro Preview",
 987      "cost_per_1m_in": 2,
 988      "cost_per_1m_out": 12,
 989      "cost_per_1m_in_cached": 0.2,
 990      "cost_per_1m_out_cached": 0,
 991      "context_window": 1000000,
 992      "default_max_tokens": 8000,
 993      "can_reason": true,
 994      "reasoning_levels": [
 995        "low",
 996        "medium",
 997        "high"
 998      ],
 999      "default_reasoning_effort": "medium",
1000      "supports_attachments": true,
1001      "options": {}
1002    },
1003    {
1004      "id": "xai/grok-2-vision",
1005      "name": "Grok 2 Vision",
1006      "cost_per_1m_in": 2,
1007      "cost_per_1m_out": 10,
1008      "cost_per_1m_in_cached": 0,
1009      "cost_per_1m_out_cached": 0,
1010      "context_window": 32768,
1011      "default_max_tokens": 8000,
1012      "can_reason": false,
1013      "supports_attachments": true,
1014      "options": {}
1015    },
1016    {
1017      "id": "xai/grok-3",
1018      "name": "Grok 3 Beta",
1019      "cost_per_1m_in": 3,
1020      "cost_per_1m_out": 15,
1021      "cost_per_1m_in_cached": 0,
1022      "cost_per_1m_out_cached": 0,
1023      "context_window": 131072,
1024      "default_max_tokens": 8000,
1025      "can_reason": false,
1026      "supports_attachments": false,
1027      "options": {}
1028    },
1029    {
1030      "id": "xai/grok-3-fast",
1031      "name": "Grok 3 Fast Beta",
1032      "cost_per_1m_in": 5,
1033      "cost_per_1m_out": 25,
1034      "cost_per_1m_in_cached": 0,
1035      "cost_per_1m_out_cached": 0,
1036      "context_window": 131072,
1037      "default_max_tokens": 8000,
1038      "can_reason": false,
1039      "supports_attachments": false,
1040      "options": {}
1041    },
1042    {
1043      "id": "xai/grok-3-mini",
1044      "name": "Grok 3 Mini Beta",
1045      "cost_per_1m_in": 0.3,
1046      "cost_per_1m_out": 0.5,
1047      "cost_per_1m_in_cached": 0,
1048      "cost_per_1m_out_cached": 0,
1049      "context_window": 131072,
1050      "default_max_tokens": 8000,
1051      "can_reason": false,
1052      "supports_attachments": false,
1053      "options": {}
1054    },
1055    {
1056      "id": "xai/grok-3-mini-fast",
1057      "name": "Grok 3 Mini Fast Beta",
1058      "cost_per_1m_in": 0.6,
1059      "cost_per_1m_out": 4,
1060      "cost_per_1m_in_cached": 0,
1061      "cost_per_1m_out_cached": 0,
1062      "context_window": 131072,
1063      "default_max_tokens": 8000,
1064      "can_reason": false,
1065      "supports_attachments": false,
1066      "options": {}
1067    },
1068    {
1069      "id": "xai/grok-4",
1070      "name": "Grok 4",
1071      "cost_per_1m_in": 3,
1072      "cost_per_1m_out": 15,
1073      "cost_per_1m_in_cached": 0,
1074      "cost_per_1m_out_cached": 0,
1075      "context_window": 256000,
1076      "default_max_tokens": 8000,
1077      "can_reason": true,
1078      "reasoning_levels": [
1079        "low",
1080        "medium",
1081        "high"
1082      ],
1083      "default_reasoning_effort": "medium",
1084      "supports_attachments": true,
1085      "options": {}
1086    },
1087    {
1088      "id": "xai/grok-4-fast-non-reasoning",
1089      "name": "Grok 4 Fast Non-Reasoning",
1090      "cost_per_1m_in": 0.2,
1091      "cost_per_1m_out": 0.5,
1092      "cost_per_1m_in_cached": 0.05,
1093      "cost_per_1m_out_cached": 0,
1094      "context_window": 2000000,
1095      "default_max_tokens": 8000,
1096      "can_reason": false,
1097      "supports_attachments": false,
1098      "options": {}
1099    },
1100    {
1101      "id": "xai/grok-4-fast-reasoning",
1102      "name": "Grok 4 Fast Reasoning",
1103      "cost_per_1m_in": 0.2,
1104      "cost_per_1m_out": 0.5,
1105      "cost_per_1m_in_cached": 0.05,
1106      "cost_per_1m_out_cached": 0,
1107      "context_window": 2000000,
1108      "default_max_tokens": 8000,
1109      "can_reason": true,
1110      "reasoning_levels": [
1111        "low",
1112        "medium",
1113        "high"
1114      ],
1115      "default_reasoning_effort": "medium",
1116      "supports_attachments": false,
1117      "options": {}
1118    },
1119    {
1120      "id": "xai/grok-4.1-fast-non-reasoning",
1121      "name": "Grok 4.1 Fast Non-Reasoning",
1122      "cost_per_1m_in": 0.2,
1123      "cost_per_1m_out": 0.5,
1124      "cost_per_1m_in_cached": 0.05,
1125      "cost_per_1m_out_cached": 0,
1126      "context_window": 2000000,
1127      "default_max_tokens": 8000,
1128      "can_reason": false,
1129      "supports_attachments": false,
1130      "options": {}
1131    },
1132    {
1133      "id": "xai/grok-4.1-fast-reasoning",
1134      "name": "Grok 4.1 Fast Reasoning",
1135      "cost_per_1m_in": 0.2,
1136      "cost_per_1m_out": 0.5,
1137      "cost_per_1m_in_cached": 0.05,
1138      "cost_per_1m_out_cached": 0,
1139      "context_window": 2000000,
1140      "default_max_tokens": 8000,
1141      "can_reason": true,
1142      "reasoning_levels": [
1143        "low",
1144        "medium",
1145        "high"
1146      ],
1147      "default_reasoning_effort": "medium",
1148      "supports_attachments": false,
1149      "options": {}
1150    },
1151    {
1152      "id": "xai/grok-code-fast-1",
1153      "name": "Grok Code Fast 1",
1154      "cost_per_1m_in": 0.2,
1155      "cost_per_1m_out": 1.5,
1156      "cost_per_1m_in_cached": 0.02,
1157      "cost_per_1m_out_cached": 0,
1158      "context_window": 256000,
1159      "default_max_tokens": 8000,
1160      "can_reason": true,
1161      "reasoning_levels": [
1162        "low",
1163        "medium",
1164        "high"
1165      ],
1166      "default_reasoning_effort": "medium",
1167      "supports_attachments": false,
1168      "options": {}
1169    },
1170    {
1171      "id": "prime-intellect/intellect-3",
1172      "name": "INTELLECT 3",
1173      "cost_per_1m_in": 0.2,
1174      "cost_per_1m_out": 1.1,
1175      "cost_per_1m_in_cached": 0,
1176      "cost_per_1m_out_cached": 0,
1177      "context_window": 131072,
1178      "default_max_tokens": 8000,
1179      "can_reason": true,
1180      "reasoning_levels": [
1181        "low",
1182        "medium",
1183        "high"
1184      ],
1185      "default_reasoning_effort": "medium",
1186      "supports_attachments": false,
1187      "options": {}
1188    },
1189    {
1190      "id": "moonshotai/kimi-k2",
1191      "name": "Kimi K2",
1192      "cost_per_1m_in": 0.5,
1193      "cost_per_1m_out": 2,
1194      "cost_per_1m_in_cached": 0,
1195      "cost_per_1m_out_cached": 0,
1196      "context_window": 131072,
1197      "default_max_tokens": 8000,
1198      "can_reason": false,
1199      "supports_attachments": false,
1200      "options": {}
1201    },
1202    {
1203      "id": "moonshotai/kimi-k2-thinking",
1204      "name": "Kimi K2 Thinking",
1205      "cost_per_1m_in": 0.47,
1206      "cost_per_1m_out": 2,
1207      "cost_per_1m_in_cached": 0.141,
1208      "cost_per_1m_out_cached": 0,
1209      "context_window": 262144,
1210      "default_max_tokens": 8000,
1211      "can_reason": true,
1212      "reasoning_levels": [
1213        "low",
1214        "medium",
1215        "high"
1216      ],
1217      "default_reasoning_effort": "medium",
1218      "supports_attachments": false,
1219      "options": {}
1220    },
1221    {
1222      "id": "moonshotai/kimi-k2-thinking-turbo",
1223      "name": "Kimi K2 Thinking Turbo",
1224      "cost_per_1m_in": 1.15,
1225      "cost_per_1m_out": 8,
1226      "cost_per_1m_in_cached": 0.15,
1227      "cost_per_1m_out_cached": 0,
1228      "context_window": 262144,
1229      "default_max_tokens": 8000,
1230      "can_reason": true,
1231      "reasoning_levels": [
1232        "low",
1233        "medium",
1234        "high"
1235      ],
1236      "default_reasoning_effort": "medium",
1237      "supports_attachments": false,
1238      "options": {}
1239    },
1240    {
1241      "id": "moonshotai/kimi-k2-turbo",
1242      "name": "Kimi K2 Turbo",
1243      "cost_per_1m_in": 2.4,
1244      "cost_per_1m_out": 10,
1245      "cost_per_1m_in_cached": 0,
1246      "cost_per_1m_out_cached": 0,
1247      "context_window": 256000,
1248      "default_max_tokens": 8000,
1249      "can_reason": false,
1250      "supports_attachments": false,
1251      "options": {}
1252    },
1253    {
1254      "id": "moonshotai/kimi-k2.5",
1255      "name": "Kimi K2.5",
1256      "cost_per_1m_in": 0.6,
1257      "cost_per_1m_out": 3,
1258      "cost_per_1m_in_cached": 0.1,
1259      "cost_per_1m_out_cached": 0,
1260      "context_window": 256000,
1261      "default_max_tokens": 8000,
1262      "can_reason": true,
1263      "reasoning_levels": [
1264        "low",
1265        "medium",
1266        "high"
1267      ],
1268      "default_reasoning_effort": "medium",
1269      "supports_attachments": true,
1270      "options": {}
1271    },
1272    {
1273      "id": "meta/llama-3.1-70b",
1274      "name": "Llama 3.1 70B Instruct",
1275      "cost_per_1m_in": 0.4,
1276      "cost_per_1m_out": 0.4,
1277      "cost_per_1m_in_cached": 0,
1278      "cost_per_1m_out_cached": 0,
1279      "context_window": 131072,
1280      "default_max_tokens": 8000,
1281      "can_reason": false,
1282      "supports_attachments": false,
1283      "options": {}
1284    },
1285    {
1286      "id": "meta/llama-3.1-8b",
1287      "name": "Llama 3.1 8B Instruct",
1288      "cost_per_1m_in": 0.03,
1289      "cost_per_1m_out": 0.05,
1290      "cost_per_1m_in_cached": 0,
1291      "cost_per_1m_out_cached": 0,
1292      "context_window": 131072,
1293      "default_max_tokens": 8000,
1294      "can_reason": false,
1295      "supports_attachments": false,
1296      "options": {}
1297    },
1298    {
1299      "id": "meta/llama-3.2-11b",
1300      "name": "Llama 3.2 11B Vision Instruct",
1301      "cost_per_1m_in": 0.16,
1302      "cost_per_1m_out": 0.16,
1303      "cost_per_1m_in_cached": 0,
1304      "cost_per_1m_out_cached": 0,
1305      "context_window": 128000,
1306      "default_max_tokens": 8000,
1307      "can_reason": false,
1308      "supports_attachments": true,
1309      "options": {}
1310    },
1311    {
1312      "id": "meta/llama-3.2-90b",
1313      "name": "Llama 3.2 90B Vision Instruct",
1314      "cost_per_1m_in": 0.72,
1315      "cost_per_1m_out": 0.72,
1316      "cost_per_1m_in_cached": 0,
1317      "cost_per_1m_out_cached": 0,
1318      "context_window": 128000,
1319      "default_max_tokens": 8000,
1320      "can_reason": false,
1321      "supports_attachments": true,
1322      "options": {}
1323    },
1324    {
1325      "id": "meta/llama-3.3-70b",
1326      "name": "Llama 3.3 70B Instruct",
1327      "cost_per_1m_in": 0.72,
1328      "cost_per_1m_out": 0.72,
1329      "cost_per_1m_in_cached": 0,
1330      "cost_per_1m_out_cached": 0,
1331      "context_window": 128000,
1332      "default_max_tokens": 8000,
1333      "can_reason": false,
1334      "supports_attachments": false,
1335      "options": {}
1336    },
1337    {
1338      "id": "meta/llama-4-maverick",
1339      "name": "Llama 4 Maverick 17B Instruct",
1340      "cost_per_1m_in": 0.15,
1341      "cost_per_1m_out": 0.6,
1342      "cost_per_1m_in_cached": 0,
1343      "cost_per_1m_out_cached": 0,
1344      "context_window": 131072,
1345      "default_max_tokens": 8000,
1346      "can_reason": false,
1347      "supports_attachments": true,
1348      "options": {}
1349    },
1350    {
1351      "id": "meta/llama-4-scout",
1352      "name": "Llama 4 Scout 17B Instruct",
1353      "cost_per_1m_in": 0.08,
1354      "cost_per_1m_out": 0.3,
1355      "cost_per_1m_in_cached": 0,
1356      "cost_per_1m_out_cached": 0,
1357      "context_window": 131072,
1358      "default_max_tokens": 8000,
1359      "can_reason": false,
1360      "supports_attachments": true,
1361      "options": {}
1362    },
1363    {
1364      "id": "meituan/longcat-flash-chat",
1365      "name": "LongCat Flash Chat",
1366      "cost_per_1m_in": 0,
1367      "cost_per_1m_out": 0,
1368      "cost_per_1m_in_cached": 0,
1369      "cost_per_1m_out_cached": 0,
1370      "context_window": 128000,
1371      "default_max_tokens": 8000,
1372      "can_reason": false,
1373      "supports_attachments": false,
1374      "options": {}
1375    },
1376    {
1377      "id": "meituan/longcat-flash-thinking",
1378      "name": "LongCat Flash Thinking",
1379      "cost_per_1m_in": 0.15,
1380      "cost_per_1m_out": 1.5,
1381      "cost_per_1m_in_cached": 0,
1382      "cost_per_1m_out_cached": 0,
1383      "context_window": 128000,
1384      "default_max_tokens": 8000,
1385      "can_reason": true,
1386      "reasoning_levels": [
1387        "low",
1388        "medium",
1389        "high"
1390      ],
1391      "default_reasoning_effort": "medium",
1392      "supports_attachments": false,
1393      "options": {}
1394    },
1395    {
1396      "id": "inception/mercury-coder-small",
1397      "name": "Mercury Coder Small Beta",
1398      "cost_per_1m_in": 0.25,
1399      "cost_per_1m_out": 1,
1400      "cost_per_1m_in_cached": 0,
1401      "cost_per_1m_out_cached": 0,
1402      "context_window": 32000,
1403      "default_max_tokens": 8000,
1404      "can_reason": false,
1405      "supports_attachments": false,
1406      "options": {}
1407    },
1408    {
1409      "id": "xiaomi/mimo-v2-flash",
1410      "name": "MiMo V2 Flash",
1411      "cost_per_1m_in": 0.09,
1412      "cost_per_1m_out": 0.29,
1413      "cost_per_1m_in_cached": 0,
1414      "cost_per_1m_out_cached": 0,
1415      "context_window": 262144,
1416      "default_max_tokens": 8000,
1417      "can_reason": true,
1418      "reasoning_levels": [
1419        "low",
1420        "medium",
1421        "high"
1422      ],
1423      "default_reasoning_effort": "medium",
1424      "supports_attachments": false,
1425      "options": {}
1426    },
1427    {
1428      "id": "minimax/minimax-m2",
1429      "name": "MiniMax M2",
1430      "cost_per_1m_in": 0.27,
1431      "cost_per_1m_out": 1.15,
1432      "cost_per_1m_in_cached": 0,
1433      "cost_per_1m_out_cached": 0,
1434      "context_window": 262144,
1435      "default_max_tokens": 8000,
1436      "can_reason": true,
1437      "reasoning_levels": [
1438        "low",
1439        "medium",
1440        "high"
1441      ],
1442      "default_reasoning_effort": "medium",
1443      "supports_attachments": false,
1444      "options": {}
1445    },
1446    {
1447      "id": "minimax/minimax-m2.1",
1448      "name": "MiniMax M2.1",
1449      "cost_per_1m_in": 0.28,
1450      "cost_per_1m_out": 1.2,
1451      "cost_per_1m_in_cached": 0.14,
1452      "cost_per_1m_out_cached": 0,
1453      "context_window": 196608,
1454      "default_max_tokens": 8000,
1455      "can_reason": true,
1456      "reasoning_levels": [
1457        "low",
1458        "medium",
1459        "high"
1460      ],
1461      "default_reasoning_effort": "medium",
1462      "supports_attachments": false,
1463      "options": {}
1464    },
1465    {
1466      "id": "minimax/minimax-m2.1-lightning",
1467      "name": "MiniMax M2.1 Lightning",
1468      "cost_per_1m_in": 0.3,
1469      "cost_per_1m_out": 2.4,
1470      "cost_per_1m_in_cached": 0.03,
1471      "cost_per_1m_out_cached": 0.375,
1472      "context_window": 204800,
1473      "default_max_tokens": 8000,
1474      "can_reason": true,
1475      "reasoning_levels": [
1476        "low",
1477        "medium",
1478        "high"
1479      ],
1480      "default_reasoning_effort": "medium",
1481      "supports_attachments": false,
1482      "options": {}
1483    },
1484    {
1485      "id": "mistral/ministral-3b",
1486      "name": "Ministral 3B",
1487      "cost_per_1m_in": 0.04,
1488      "cost_per_1m_out": 0.04,
1489      "cost_per_1m_in_cached": 0,
1490      "cost_per_1m_out_cached": 0,
1491      "context_window": 128000,
1492      "default_max_tokens": 4000,
1493      "can_reason": false,
1494      "supports_attachments": false,
1495      "options": {}
1496    },
1497    {
1498      "id": "mistral/ministral-8b",
1499      "name": "Ministral 8B",
1500      "cost_per_1m_in": 0.1,
1501      "cost_per_1m_out": 0.1,
1502      "cost_per_1m_in_cached": 0,
1503      "cost_per_1m_out_cached": 0,
1504      "context_window": 128000,
1505      "default_max_tokens": 4000,
1506      "can_reason": false,
1507      "supports_attachments": false,
1508      "options": {}
1509    },
1510    {
1511      "id": "mistral/codestral",
1512      "name": "Mistral Codestral",
1513      "cost_per_1m_in": 0.3,
1514      "cost_per_1m_out": 0.9,
1515      "cost_per_1m_in_cached": 0,
1516      "cost_per_1m_out_cached": 0,
1517      "context_window": 128000,
1518      "default_max_tokens": 4000,
1519      "can_reason": false,
1520      "supports_attachments": false,
1521      "options": {}
1522    },
1523    {
1524      "id": "mistral/mistral-medium",
1525      "name": "Mistral Medium 3.1",
1526      "cost_per_1m_in": 0.4,
1527      "cost_per_1m_out": 2,
1528      "cost_per_1m_in_cached": 0,
1529      "cost_per_1m_out_cached": 0,
1530      "context_window": 128000,
1531      "default_max_tokens": 8000,
1532      "can_reason": false,
1533      "supports_attachments": true,
1534      "options": {}
1535    },
1536    {
1537      "id": "mistral/mistral-small",
1538      "name": "Mistral Small",
1539      "cost_per_1m_in": 0.1,
1540      "cost_per_1m_out": 0.3,
1541      "cost_per_1m_in_cached": 0,
1542      "cost_per_1m_out_cached": 0,
1543      "context_window": 32000,
1544      "default_max_tokens": 4000,
1545      "can_reason": false,
1546      "supports_attachments": true,
1547      "options": {}
1548    },
1549    {
1550      "id": "nvidia/nemotron-nano-12b-v2-vl",
1551      "name": "Nvidia Nemotron Nano 12B V2 VL",
1552      "cost_per_1m_in": 0.2,
1553      "cost_per_1m_out": 0.6,
1554      "cost_per_1m_in_cached": 0,
1555      "cost_per_1m_out_cached": 0,
1556      "context_window": 131072,
1557      "default_max_tokens": 8000,
1558      "can_reason": true,
1559      "reasoning_levels": [
1560        "low",
1561        "medium",
1562        "high"
1563      ],
1564      "default_reasoning_effort": "medium",
1565      "supports_attachments": true,
1566      "options": {}
1567    },
1568    {
1569      "id": "nvidia/nemotron-nano-9b-v2",
1570      "name": "Nvidia Nemotron Nano 9B V2",
1571      "cost_per_1m_in": 0.04,
1572      "cost_per_1m_out": 0.16,
1573      "cost_per_1m_in_cached": 0,
1574      "cost_per_1m_out_cached": 0,
1575      "context_window": 131072,
1576      "default_max_tokens": 8000,
1577      "can_reason": true,
1578      "reasoning_levels": [
1579        "low",
1580        "medium",
1581        "high"
1582      ],
1583      "default_reasoning_effort": "medium",
1584      "supports_attachments": false,
1585      "options": {}
1586    },
1587    {
1588      "id": "mistral/pixtral-12b",
1589      "name": "Pixtral 12B 2409",
1590      "cost_per_1m_in": 0.15,
1591      "cost_per_1m_out": 0.15,
1592      "cost_per_1m_in_cached": 0,
1593      "cost_per_1m_out_cached": 0,
1594      "context_window": 128000,
1595      "default_max_tokens": 4000,
1596      "can_reason": false,
1597      "supports_attachments": true,
1598      "options": {}
1599    },
1600    {
1601      "id": "mistral/pixtral-large",
1602      "name": "Pixtral Large",
1603      "cost_per_1m_in": 2,
1604      "cost_per_1m_out": 6,
1605      "cost_per_1m_in_cached": 0,
1606      "cost_per_1m_out_cached": 0,
1607      "context_window": 128000,
1608      "default_max_tokens": 4000,
1609      "can_reason": false,
1610      "supports_attachments": true,
1611      "options": {}
1612    },
1613    {
1614      "id": "alibaba/qwen3-coder-30b-a3b",
1615      "name": "Qwen 3 Coder 30B A3B Instruct",
1616      "cost_per_1m_in": 0.07,
1617      "cost_per_1m_out": 0.27,
1618      "cost_per_1m_in_cached": 0,
1619      "cost_per_1m_out_cached": 0,
1620      "context_window": 160000,
1621      "default_max_tokens": 8000,
1622      "can_reason": true,
1623      "reasoning_levels": [
1624        "low",
1625        "medium",
1626        "high"
1627      ],
1628      "default_reasoning_effort": "medium",
1629      "supports_attachments": false,
1630      "options": {}
1631    },
1632    {
1633      "id": "alibaba/qwen3-max-thinking",
1634      "name": "Qwen 3 Max Thinking",
1635      "cost_per_1m_in": 1.2,
1636      "cost_per_1m_out": 6,
1637      "cost_per_1m_in_cached": 0.24,
1638      "cost_per_1m_out_cached": 0,
1639      "context_window": 256000,
1640      "default_max_tokens": 8000,
1641      "can_reason": true,
1642      "reasoning_levels": [
1643        "low",
1644        "medium",
1645        "high"
1646      ],
1647      "default_reasoning_effort": "medium",
1648      "supports_attachments": false,
1649      "options": {}
1650    },
1651    {
1652      "id": "alibaba/qwen-3-32b",
1653      "name": "Qwen 3 32B",
1654      "cost_per_1m_in": 0.1,
1655      "cost_per_1m_out": 0.3,
1656      "cost_per_1m_in_cached": 0,
1657      "cost_per_1m_out_cached": 0,
1658      "context_window": 40960,
1659      "default_max_tokens": 8000,
1660      "can_reason": true,
1661      "reasoning_levels": [
1662        "low",
1663        "medium",
1664        "high"
1665      ],
1666      "default_reasoning_effort": "medium",
1667      "supports_attachments": false,
1668      "options": {}
1669    },
1670    {
1671      "id": "alibaba/qwen3-235b-a22b-thinking",
1672      "name": "Qwen3 235B A22B Thinking 2507",
1673      "cost_per_1m_in": 0.3,
1674      "cost_per_1m_out": 2.9,
1675      "cost_per_1m_in_cached": 0,
1676      "cost_per_1m_out_cached": 0,
1677      "context_window": 262144,
1678      "default_max_tokens": 8000,
1679      "can_reason": true,
1680      "reasoning_levels": [
1681        "low",
1682        "medium",
1683        "high"
1684      ],
1685      "default_reasoning_effort": "medium",
1686      "supports_attachments": true,
1687      "options": {}
1688    },
1689    {
1690      "id": "alibaba/qwen-3-235b",
1691      "name": "Qwen3 235B A22B Instruct 2507",
1692      "cost_per_1m_in": 0.071,
1693      "cost_per_1m_out": 0.463,
1694      "cost_per_1m_in_cached": 0,
1695      "cost_per_1m_out_cached": 0,
1696      "context_window": 40960,
1697      "default_max_tokens": 8000,
1698      "can_reason": false,
1699      "supports_attachments": false,
1700      "options": {}
1701    },
1702    {
1703      "id": "alibaba/qwen3-coder",
1704      "name": "Qwen3 Coder 480B A35B Instruct",
1705      "cost_per_1m_in": 0.38,
1706      "cost_per_1m_out": 1.53,
1707      "cost_per_1m_in_cached": 0,
1708      "cost_per_1m_out_cached": 0,
1709      "context_window": 262144,
1710      "default_max_tokens": 8000,
1711      "can_reason": false,
1712      "supports_attachments": false,
1713      "options": {}
1714    },
1715    {
1716      "id": "alibaba/qwen3-coder-plus",
1717      "name": "Qwen3 Coder Plus",
1718      "cost_per_1m_in": 1,
1719      "cost_per_1m_out": 5,
1720      "cost_per_1m_in_cached": 0.2,
1721      "cost_per_1m_out_cached": 0,
1722      "context_window": 1000000,
1723      "default_max_tokens": 8000,
1724      "can_reason": false,
1725      "supports_attachments": false,
1726      "options": {}
1727    },
1728    {
1729      "id": "alibaba/qwen3-max",
1730      "name": "Qwen3 Max",
1731      "cost_per_1m_in": 1.2,
1732      "cost_per_1m_out": 6,
1733      "cost_per_1m_in_cached": 0.24,
1734      "cost_per_1m_out_cached": 0,
1735      "context_window": 262144,
1736      "default_max_tokens": 8000,
1737      "can_reason": false,
1738      "supports_attachments": false,
1739      "options": {}
1740    },
1741    {
1742      "id": "alibaba/qwen3-max-preview",
1743      "name": "Qwen3 Max Preview",
1744      "cost_per_1m_in": 1.2,
1745      "cost_per_1m_out": 6,
1746      "cost_per_1m_in_cached": 0.24,
1747      "cost_per_1m_out_cached": 0,
1748      "context_window": 262144,
1749      "default_max_tokens": 8000,
1750      "can_reason": false,
1751      "supports_attachments": false,
1752      "options": {}
1753    },
1754    {
1755      "id": "alibaba/qwen-3-14b",
1756      "name": "Qwen3-14B",
1757      "cost_per_1m_in": 0.06,
1758      "cost_per_1m_out": 0.24,
1759      "cost_per_1m_in_cached": 0,
1760      "cost_per_1m_out_cached": 0,
1761      "context_window": 40960,
1762      "default_max_tokens": 8000,
1763      "can_reason": true,
1764      "reasoning_levels": [
1765        "low",
1766        "medium",
1767        "high"
1768      ],
1769      "default_reasoning_effort": "medium",
1770      "supports_attachments": false,
1771      "options": {}
1772    },
1773    {
1774      "id": "alibaba/qwen-3-30b",
1775      "name": "Qwen3-30B-A3B",
1776      "cost_per_1m_in": 0.08,
1777      "cost_per_1m_out": 0.29,
1778      "cost_per_1m_in_cached": 0,
1779      "cost_per_1m_out_cached": 0,
1780      "context_window": 40960,
1781      "default_max_tokens": 8000,
1782      "can_reason": true,
1783      "reasoning_levels": [
1784        "low",
1785        "medium",
1786        "high"
1787      ],
1788      "default_reasoning_effort": "medium",
1789      "supports_attachments": false,
1790      "options": {}
1791    },
1792    {
1793      "id": "bytedance/seed-1.6",
1794      "name": "Seed 1.6",
1795      "cost_per_1m_in": 0.25,
1796      "cost_per_1m_out": 2,
1797      "cost_per_1m_in_cached": 0.05,
1798      "cost_per_1m_out_cached": 0,
1799      "context_window": 256000,
1800      "default_max_tokens": 8000,
1801      "can_reason": true,
1802      "reasoning_levels": [
1803        "low",
1804        "medium",
1805        "high"
1806      ],
1807      "default_reasoning_effort": "medium",
1808      "supports_attachments": false,
1809      "options": {}
1810    },
1811    {
1812      "id": "perplexity/sonar",
1813      "name": "Sonar",
1814      "cost_per_1m_in": 1,
1815      "cost_per_1m_out": 1,
1816      "cost_per_1m_in_cached": 0,
1817      "cost_per_1m_out_cached": 0,
1818      "context_window": 127000,
1819      "default_max_tokens": 8000,
1820      "can_reason": false,
1821      "supports_attachments": true,
1822      "options": {}
1823    },
1824    {
1825      "id": "perplexity/sonar-pro",
1826      "name": "Sonar Pro",
1827      "cost_per_1m_in": 3,
1828      "cost_per_1m_out": 15,
1829      "cost_per_1m_in_cached": 0,
1830      "cost_per_1m_out_cached": 0,
1831      "context_window": 200000,
1832      "default_max_tokens": 8000,
1833      "can_reason": false,
1834      "supports_attachments": true,
1835      "options": {}
1836    },
1837    {
1838      "id": "arcee-ai/trinity-large-preview",
1839      "name": "Trinity Large Preview",
1840      "cost_per_1m_in": 0.25,
1841      "cost_per_1m_out": 1,
1842      "cost_per_1m_in_cached": 0,
1843      "cost_per_1m_out_cached": 0,
1844      "context_window": 131000,
1845      "default_max_tokens": 8000,
1846      "can_reason": false,
1847      "supports_attachments": false,
1848      "options": {}
1849    },
1850    {
1851      "id": "openai/gpt-oss-120b",
1852      "name": "gpt-oss-120b",
1853      "cost_per_1m_in": 0.1,
1854      "cost_per_1m_out": 0.5,
1855      "cost_per_1m_in_cached": 0,
1856      "cost_per_1m_out_cached": 0,
1857      "context_window": 131072,
1858      "default_max_tokens": 8000,
1859      "can_reason": true,
1860      "reasoning_levels": [
1861        "low",
1862        "medium",
1863        "high"
1864      ],
1865      "default_reasoning_effort": "medium",
1866      "supports_attachments": false,
1867      "options": {}
1868    },
1869    {
1870      "id": "openai/gpt-oss-20b",
1871      "name": "gpt-oss-20b",
1872      "cost_per_1m_in": 0.07,
1873      "cost_per_1m_out": 0.3,
1874      "cost_per_1m_in_cached": 0,
1875      "cost_per_1m_out_cached": 0,
1876      "context_window": 128000,
1877      "default_max_tokens": 8000,
1878      "can_reason": true,
1879      "reasoning_levels": [
1880        "low",
1881        "medium",
1882        "high"
1883      ],
1884      "default_reasoning_effort": "medium",
1885      "supports_attachments": false,
1886      "options": {}
1887    },
1888    {
1889      "id": "openai/gpt-oss-safeguard-20b",
1890      "name": "gpt-oss-safeguard-20b",
1891      "cost_per_1m_in": 0.075,
1892      "cost_per_1m_out": 0.3,
1893      "cost_per_1m_in_cached": 0.037,
1894      "cost_per_1m_out_cached": 0,
1895      "context_window": 131072,
1896      "default_max_tokens": 8000,
1897      "can_reason": true,
1898      "reasoning_levels": [
1899        "low",
1900        "medium",
1901        "high"
1902      ],
1903      "default_reasoning_effort": "medium",
1904      "supports_attachments": false,
1905      "options": {}
1906    },
1907    {
1908      "id": "openai/o1",
1909      "name": "o1",
1910      "cost_per_1m_in": 15,
1911      "cost_per_1m_out": 60,
1912      "cost_per_1m_in_cached": 7.5,
1913      "cost_per_1m_out_cached": 0,
1914      "context_window": 200000,
1915      "default_max_tokens": 8000,
1916      "can_reason": true,
1917      "reasoning_levels": [
1918        "low",
1919        "medium",
1920        "high"
1921      ],
1922      "default_reasoning_effort": "medium",
1923      "supports_attachments": true,
1924      "options": {}
1925    },
1926    {
1927      "id": "openai/o3",
1928      "name": "o3",
1929      "cost_per_1m_in": 2,
1930      "cost_per_1m_out": 8,
1931      "cost_per_1m_in_cached": 0.5,
1932      "cost_per_1m_out_cached": 0,
1933      "context_window": 200000,
1934      "default_max_tokens": 8000,
1935      "can_reason": true,
1936      "reasoning_levels": [
1937        "low",
1938        "medium",
1939        "high"
1940      ],
1941      "default_reasoning_effort": "medium",
1942      "supports_attachments": true,
1943      "options": {}
1944    },
1945    {
1946      "id": "openai/o3-pro",
1947      "name": "o3 Pro",
1948      "cost_per_1m_in": 20,
1949      "cost_per_1m_out": 80,
1950      "cost_per_1m_in_cached": 0,
1951      "cost_per_1m_out_cached": 0,
1952      "context_window": 200000,
1953      "default_max_tokens": 8000,
1954      "can_reason": true,
1955      "reasoning_levels": [
1956        "low",
1957        "medium",
1958        "high"
1959      ],
1960      "default_reasoning_effort": "medium",
1961      "supports_attachments": true,
1962      "options": {}
1963    },
1964    {
1965      "id": "openai/o3-deep-research",
1966      "name": "o3-deep-research",
1967      "cost_per_1m_in": 10,
1968      "cost_per_1m_out": 40,
1969      "cost_per_1m_in_cached": 2.5,
1970      "cost_per_1m_out_cached": 0,
1971      "context_window": 200000,
1972      "default_max_tokens": 8000,
1973      "can_reason": true,
1974      "reasoning_levels": [
1975        "low",
1976        "medium",
1977        "high"
1978      ],
1979      "default_reasoning_effort": "medium",
1980      "supports_attachments": true,
1981      "options": {}
1982    },
1983    {
1984      "id": "openai/o3-mini",
1985      "name": "o3-mini",
1986      "cost_per_1m_in": 1.1,
1987      "cost_per_1m_out": 4.4,
1988      "cost_per_1m_in_cached": 0.55,
1989      "cost_per_1m_out_cached": 0,
1990      "context_window": 200000,
1991      "default_max_tokens": 8000,
1992      "can_reason": true,
1993      "reasoning_levels": [
1994        "low",
1995        "medium",
1996        "high"
1997      ],
1998      "default_reasoning_effort": "medium",
1999      "supports_attachments": false,
2000      "options": {}
2001    },
2002    {
2003      "id": "openai/o4-mini",
2004      "name": "o4-mini",
2005      "cost_per_1m_in": 1.1,
2006      "cost_per_1m_out": 4.4,
2007      "cost_per_1m_in_cached": 0.275,
2008      "cost_per_1m_out_cached": 0,
2009      "context_window": 200000,
2010      "default_max_tokens": 8000,
2011      "can_reason": true,
2012      "reasoning_levels": [
2013        "low",
2014        "medium",
2015        "high"
2016      ],
2017      "default_reasoning_effort": "medium",
2018      "supports_attachments": true,
2019      "options": {}
2020    },
2021    {
2022      "id": "vercel/v0-1.0-md",
2023      "name": "v0-1.0-md",
2024      "cost_per_1m_in": 3,
2025      "cost_per_1m_out": 15,
2026      "cost_per_1m_in_cached": 0,
2027      "cost_per_1m_out_cached": 0,
2028      "context_window": 128000,
2029      "default_max_tokens": 8000,
2030      "can_reason": false,
2031      "supports_attachments": true,
2032      "options": {}
2033    },
2034    {
2035      "id": "vercel/v0-1.5-md",
2036      "name": "v0-1.5-md",
2037      "cost_per_1m_in": 3,
2038      "cost_per_1m_out": 15,
2039      "cost_per_1m_in_cached": 0,
2040      "cost_per_1m_out_cached": 0,
2041      "context_window": 128000,
2042      "default_max_tokens": 8000,
2043      "can_reason": false,
2044      "supports_attachments": true,
2045      "options": {}
2046    }
2047  ],
2048  "default_headers": {
2049    "HTTP-Referer": "https://charm.land",
2050    "X-Title": "Crush"
2051  }
2052}
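
For reference, the sketch below shows one way to exercise this catalog by hand. It is an illustration only, not Crush's provider implementation: it assumes the gateway behind "api_endpoint" accepts standard OpenAI-style chat completion requests (which is what the "openai-compat" type implies), resolves the "$VERCEL_API_KEY" placeholder from the environment, requests the catalog's default_small_model_id, and forwards the default_headers. The file name example_request.go is hypothetical.

example_request.go

// Illustrative sketch only; assumes an OpenAI-compatible /chat/completions
// route behind the catalog's "api_endpoint". Not Crush's actual client code.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// "$VERCEL_API_KEY" in the catalog is an environment-variable placeholder.
	key := os.Getenv("VERCEL_API_KEY")
	if key == "" {
		fmt.Fprintln(os.Stderr, "VERCEL_API_KEY is not set")
		os.Exit(1)
	}

	// OpenAI-style chat completion body, using the default_small_model_id.
	body, err := json.Marshal(map[string]any{
		"model": "anthropic/claude-haiku-4.5",
		"messages": []map[string]string{
			{"role": "user", "content": "Say hello in one short sentence."},
		},
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	req, err := http.NewRequest(http.MethodPost,
		"https://ai-gateway.vercel.sh/v1/chat/completions", bytes.NewReader(body))
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	req.Header.Set("Authorization", "Bearer "+key)
	req.Header.Set("Content-Type", "application/json")
	// The catalog's default_headers accompany every request.
	req.Header.Set("HTTP-Referer", "https://charm.land")
	req.Header.Set("X-Title", "Crush")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(out))
}

Per-model fields such as cost_per_1m_*, context_window, and default_max_tokens are client-side metadata in this catalog; they describe the models rather than appear in the request itself.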