vercel.json
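
The catalog below is the provider definition for Vercel's AI Gateway: connection settings (api_endpoint, api_key), the default large and small model ids, and one entry per model with pricing per million tokens, context window, default max tokens, and reasoning/attachment capabilities. As a quick orientation, here is a minimal sketch of how these fields are typically used together; it assumes the gateway accepts OpenAI-compatible chat-completion requests under the api_endpoint listed below and that VERCEL_API_KEY is set in the environment (the /chat/completions path and request shape are assumptions for illustration, not part of this config).

// Minimal sketch (illustration only, not part of vercel.json): call the gateway
// with a model id from the catalog below.
const res = await fetch("https://ai-gateway.vercel.sh/v1/chat/completions", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.VERCEL_API_KEY}`, // matches the $VERCEL_API_KEY placeholder
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    model: "anthropic/claude-sonnet-4", // default_large_model_id in this config
    max_tokens: 8000,                   // default_max_tokens for that model
    messages: [{ role: "user", content: "Hello" }],
  }),
});
const data = await res.json();
console.log(data.choices?.[0]?.message?.content);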

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "vercel",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true,
  21      "options": {}
  22    },
  23    {
  24      "id": "anthropic/claude-3.5-haiku",
  25      "name": "Claude 3.5 Haiku",
   26      "cost_per_1m_in": 0.8,
  27      "cost_per_1m_out": 4,
  28      "cost_per_1m_in_cached": 0.08,
  29      "cost_per_1m_out_cached": 1,
  30      "context_window": 200000,
  31      "default_max_tokens": 8000,
  32      "can_reason": false,
  33      "supports_attachments": true,
  34      "options": {}
  35    },
  36    {
  37      "id": "anthropic/claude-3.5-sonnet",
  38      "name": "Claude 3.5 Sonnet",
  39      "cost_per_1m_in": 3,
  40      "cost_per_1m_out": 15,
  41      "cost_per_1m_in_cached": 0.3,
  42      "cost_per_1m_out_cached": 3.75,
  43      "context_window": 200000,
  44      "default_max_tokens": 8000,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "anthropic/claude-3.5-sonnet-20240620",
  51      "name": "Claude 3.5 Sonnet (2024-06-20)",
  52      "cost_per_1m_in": 3,
  53      "cost_per_1m_out": 15,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 200000,
  57      "default_max_tokens": 8000,
  58      "can_reason": false,
  59      "supports_attachments": true,
  60      "options": {}
  61    },
  62    {
  63      "id": "anthropic/claude-3.7-sonnet",
  64      "name": "Claude 3.7 Sonnet",
  65      "cost_per_1m_in": 3,
  66      "cost_per_1m_out": 15,
  67      "cost_per_1m_in_cached": 0.3,
  68      "cost_per_1m_out_cached": 3.75,
  69      "context_window": 200000,
  70      "default_max_tokens": 8000,
  71      "can_reason": true,
  72      "reasoning_levels": [
  73        "none",
  74        "minimal",
  75        "low",
  76        "medium",
  77        "high",
  78        "xhigh"
  79      ],
  80      "default_reasoning_effort": "medium",
  81      "supports_attachments": true,
  82      "options": {}
  83    },
  84    {
  85      "id": "anthropic/claude-haiku-4.5",
  86      "name": "Claude Haiku 4.5",
  87      "cost_per_1m_in": 1,
  88      "cost_per_1m_out": 5,
   89      "cost_per_1m_in_cached": 0.1,
  90      "cost_per_1m_out_cached": 1.25,
  91      "context_window": 200000,
  92      "default_max_tokens": 8000,
  93      "can_reason": true,
  94      "reasoning_levels": [
  95        "none",
  96        "minimal",
  97        "low",
  98        "medium",
  99        "high",
 100        "xhigh"
 101      ],
 102      "default_reasoning_effort": "medium",
 103      "supports_attachments": true,
 104      "options": {}
 105    },
 106    {
 107      "id": "anthropic/claude-opus-4",
 108      "name": "Claude Opus 4",
 109      "cost_per_1m_in": 15,
 110      "cost_per_1m_out": 75,
 111      "cost_per_1m_in_cached": 1.5,
 112      "cost_per_1m_out_cached": 18.75,
 113      "context_window": 200000,
 114      "default_max_tokens": 8000,
 115      "can_reason": true,
 116      "reasoning_levels": [
 117        "none",
 118        "minimal",
 119        "low",
 120        "medium",
 121        "high",
 122        "xhigh"
 123      ],
 124      "default_reasoning_effort": "medium",
 125      "supports_attachments": true,
 126      "options": {}
 127    },
 128    {
 129      "id": "anthropic/claude-opus-4.1",
 130      "name": "Claude Opus 4.1",
 131      "cost_per_1m_in": 15,
 132      "cost_per_1m_out": 75,
 133      "cost_per_1m_in_cached": 1.5,
 134      "cost_per_1m_out_cached": 18.75,
 135      "context_window": 200000,
 136      "default_max_tokens": 8000,
 137      "can_reason": true,
 138      "reasoning_levels": [
 139        "none",
 140        "minimal",
 141        "low",
 142        "medium",
 143        "high",
 144        "xhigh"
 145      ],
 146      "default_reasoning_effort": "medium",
 147      "supports_attachments": true,
 148      "options": {}
 149    },
 150    {
 151      "id": "anthropic/claude-opus-4.5",
 152      "name": "Claude Opus 4.5",
 153      "cost_per_1m_in": 5,
 154      "cost_per_1m_out": 25,
 155      "cost_per_1m_in_cached": 0.5,
 156      "cost_per_1m_out_cached": 6.25,
 157      "context_window": 200000,
 158      "default_max_tokens": 8000,
 159      "can_reason": true,
 160      "reasoning_levels": [
 161        "none",
 162        "minimal",
 163        "low",
 164        "medium",
 165        "high",
 166        "xhigh"
 167      ],
 168      "default_reasoning_effort": "medium",
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-opus-4.6",
 174      "name": "Claude Opus 4.6",
 175      "cost_per_1m_in": 5,
 176      "cost_per_1m_out": 25,
 177      "cost_per_1m_in_cached": 0.5,
 178      "cost_per_1m_out_cached": 6.25,
 179      "context_window": 1000000,
 180      "default_max_tokens": 8000,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "none",
 184        "minimal",
 185        "low",
 186        "medium",
 187        "high",
 188        "xhigh"
 189      ],
 190      "default_reasoning_effort": "medium",
 191      "supports_attachments": true,
 192      "options": {}
 193    },
 194    {
 195      "id": "anthropic/claude-sonnet-4",
 196      "name": "Claude Sonnet 4",
 197      "cost_per_1m_in": 3,
 198      "cost_per_1m_out": 15,
 199      "cost_per_1m_in_cached": 0.3,
 200      "cost_per_1m_out_cached": 3.75,
 201      "context_window": 1000000,
 202      "default_max_tokens": 8000,
 203      "can_reason": true,
 204      "reasoning_levels": [
 205        "none",
 206        "minimal",
 207        "low",
 208        "medium",
 209        "high",
 210        "xhigh"
 211      ],
 212      "default_reasoning_effort": "medium",
 213      "supports_attachments": true,
 214      "options": {}
 215    },
 216    {
 217      "id": "anthropic/claude-sonnet-4.5",
 218      "name": "Claude Sonnet 4.5",
 219      "cost_per_1m_in": 3,
 220      "cost_per_1m_out": 15,
 221      "cost_per_1m_in_cached": 0.3,
 222      "cost_per_1m_out_cached": 3.75,
 223      "context_window": 1000000,
 224      "default_max_tokens": 8000,
 225      "can_reason": true,
 226      "reasoning_levels": [
 227        "none",
 228        "minimal",
 229        "low",
 230        "medium",
 231        "high",
 232        "xhigh"
 233      ],
 234      "default_reasoning_effort": "medium",
 235      "supports_attachments": true,
 236      "options": {}
 237    },
 238    {
 239      "id": "openai/codex-mini",
 240      "name": "Codex Mini",
 241      "cost_per_1m_in": 1.5,
 242      "cost_per_1m_out": 6,
 243      "cost_per_1m_in_cached": 0.375,
 244      "cost_per_1m_out_cached": 0,
 245      "context_window": 200000,
 246      "default_max_tokens": 8000,
 247      "can_reason": true,
 248      "reasoning_levels": [
 249        "low",
 250        "medium",
 251        "high"
 252      ],
 253      "default_reasoning_effort": "medium",
 254      "supports_attachments": true,
 255      "options": {}
 256    },
 257    {
 258      "id": "cohere/command-a",
 259      "name": "Command A",
 260      "cost_per_1m_in": 2.5,
 261      "cost_per_1m_out": 10,
 262      "cost_per_1m_in_cached": 0,
 263      "cost_per_1m_out_cached": 0,
 264      "context_window": 256000,
 265      "default_max_tokens": 8000,
 266      "can_reason": false,
 267      "supports_attachments": false,
 268      "options": {}
 269    },
 270    {
 271      "id": "deepseek/deepseek-v3",
 272      "name": "DeepSeek V3 0324",
 273      "cost_per_1m_in": 0.77,
 274      "cost_per_1m_out": 0.77,
 275      "cost_per_1m_in_cached": 0,
 276      "cost_per_1m_out_cached": 0,
 277      "context_window": 163840,
 278      "default_max_tokens": 8000,
 279      "can_reason": false,
 280      "supports_attachments": false,
 281      "options": {}
 282    },
 283    {
 284      "id": "deepseek/deepseek-v3.1-terminus",
 285      "name": "DeepSeek V3.1 Terminus",
 286      "cost_per_1m_in": 0.27,
 287      "cost_per_1m_out": 1,
 288      "cost_per_1m_in_cached": 0,
 289      "cost_per_1m_out_cached": 0,
 290      "context_window": 131072,
 291      "default_max_tokens": 8000,
 292      "can_reason": true,
 293      "reasoning_levels": [
 294        "low",
 295        "medium",
 296        "high"
 297      ],
 298      "default_reasoning_effort": "medium",
 299      "supports_attachments": false,
 300      "options": {}
 301    },
 302    {
 303      "id": "deepseek/deepseek-v3.2-thinking",
 304      "name": "DeepSeek V3.2 Thinking",
 305      "cost_per_1m_in": 0.28,
 306      "cost_per_1m_out": 0.42,
 307      "cost_per_1m_in_cached": 0.028,
 308      "cost_per_1m_out_cached": 0,
 309      "context_window": 128000,
 310      "default_max_tokens": 8000,
 311      "can_reason": true,
 312      "reasoning_levels": [
 313        "low",
 314        "medium",
 315        "high"
 316      ],
 317      "default_reasoning_effort": "medium",
 318      "supports_attachments": false,
 319      "options": {}
 320    },
 321    {
 322      "id": "deepseek/deepseek-v3.1",
 323      "name": "DeepSeek-V3.1",
 324      "cost_per_1m_in": 0.3,
 325      "cost_per_1m_out": 1,
 326      "cost_per_1m_in_cached": 0,
 327      "cost_per_1m_out_cached": 0,
 328      "context_window": 163840,
 329      "default_max_tokens": 8000,
 330      "can_reason": true,
 331      "reasoning_levels": [
 332        "low",
 333        "medium",
 334        "high"
 335      ],
 336      "default_reasoning_effort": "medium",
 337      "supports_attachments": false,
 338      "options": {}
 339    },
 340    {
 341      "id": "mistral/devstral-2",
 342      "name": "Devstral 2",
 343      "cost_per_1m_in": 0,
 344      "cost_per_1m_out": 0,
 345      "cost_per_1m_in_cached": 0,
 346      "cost_per_1m_out_cached": 0,
 347      "context_window": 256000,
 348      "default_max_tokens": 8000,
 349      "can_reason": false,
 350      "supports_attachments": false,
 351      "options": {}
 352    },
 353    {
 354      "id": "mistral/devstral-small",
 355      "name": "Devstral Small 1.1",
  356      "cost_per_1m_in": 0.1,
 357      "cost_per_1m_out": 0.3,
 358      "cost_per_1m_in_cached": 0,
 359      "cost_per_1m_out_cached": 0,
 360      "context_window": 128000,
 361      "default_max_tokens": 8000,
 362      "can_reason": false,
 363      "supports_attachments": false,
 364      "options": {}
 365    },
 366    {
 367      "id": "mistral/devstral-small-2",
 368      "name": "Devstral Small 2",
 369      "cost_per_1m_in": 0,
 370      "cost_per_1m_out": 0,
 371      "cost_per_1m_in_cached": 0,
 372      "cost_per_1m_out_cached": 0,
 373      "context_window": 256000,
 374      "default_max_tokens": 8000,
 375      "can_reason": false,
 376      "supports_attachments": false,
 377      "options": {}
 378    },
 379    {
 380      "id": "zai/glm-4.5-air",
 381      "name": "GLM 4.5 Air",
  382      "cost_per_1m_in": 0.2,
 383      "cost_per_1m_out": 1.1,
 384      "cost_per_1m_in_cached": 0.03,
 385      "cost_per_1m_out_cached": 0,
 386      "context_window": 128000,
 387      "default_max_tokens": 8000,
 388      "can_reason": true,
 389      "reasoning_levels": [
 390        "low",
 391        "medium",
 392        "high"
 393      ],
 394      "default_reasoning_effort": "medium",
 395      "supports_attachments": false,
 396      "options": {}
 397    },
 398    {
 399      "id": "zai/glm-4.5v",
 400      "name": "GLM 4.5V",
 401      "cost_per_1m_in": 0.6,
  402      "cost_per_1m_out": 1.8,
 403      "cost_per_1m_in_cached": 0,
 404      "cost_per_1m_out_cached": 0,
 405      "context_window": 65536,
 406      "default_max_tokens": 8000,
 407      "can_reason": true,
 408      "reasoning_levels": [
 409        "low",
 410        "medium",
 411        "high"
 412      ],
 413      "default_reasoning_effort": "medium",
 414      "supports_attachments": true,
 415      "options": {}
 416    },
 417    {
 418      "id": "zai/glm-4.6",
 419      "name": "GLM 4.6",
  420      "cost_per_1m_in": 0.45,
  421      "cost_per_1m_out": 1.8,
 422      "cost_per_1m_in_cached": 0.11,
 423      "cost_per_1m_out_cached": 0,
 424      "context_window": 200000,
 425      "default_max_tokens": 8000,
 426      "can_reason": true,
 427      "reasoning_levels": [
 428        "low",
 429        "medium",
 430        "high"
 431      ],
 432      "default_reasoning_effort": "medium",
 433      "supports_attachments": false,
 434      "options": {}
 435    },
 436    {
 437      "id": "zai/glm-4.7",
 438      "name": "GLM 4.7",
 439      "cost_per_1m_in": 0.43,
 440      "cost_per_1m_out": 1.75,
 441      "cost_per_1m_in_cached": 0.08,
 442      "cost_per_1m_out_cached": 0,
 443      "context_window": 202752,
 444      "default_max_tokens": 8000,
 445      "can_reason": true,
 446      "reasoning_levels": [
 447        "low",
 448        "medium",
 449        "high"
 450      ],
 451      "default_reasoning_effort": "medium",
 452      "supports_attachments": false,
 453      "options": {}
 454    },
 455    {
 456      "id": "zai/glm-4.7-flashx",
 457      "name": "GLM 4.7 FlashX",
 458      "cost_per_1m_in": 0.06,
  459      "cost_per_1m_out": 0.4,
 460      "cost_per_1m_in_cached": 0.01,
 461      "cost_per_1m_out_cached": 0,
 462      "context_window": 200000,
 463      "default_max_tokens": 8000,
 464      "can_reason": true,
 465      "reasoning_levels": [
 466        "low",
 467        "medium",
 468        "high"
 469      ],
 470      "default_reasoning_effort": "medium",
 471      "supports_attachments": false,
 472      "options": {}
 473    },
 474    {
 475      "id": "zai/glm-4.5",
 476      "name": "GLM-4.5",
 477      "cost_per_1m_in": 0.6,
 478      "cost_per_1m_out": 2.2,
 479      "cost_per_1m_in_cached": 0,
 480      "cost_per_1m_out_cached": 0,
 481      "context_window": 131072,
 482      "default_max_tokens": 8000,
 483      "can_reason": true,
 484      "reasoning_levels": [
 485        "low",
 486        "medium",
 487        "high"
 488      ],
 489      "default_reasoning_effort": "medium",
 490      "supports_attachments": false,
 491      "options": {}
 492    },
 493    {
 494      "id": "zai/glm-4.6v",
 495      "name": "GLM-4.6V",
 496      "cost_per_1m_in": 0.3,
  497      "cost_per_1m_out": 0.9,
  498      "cost_per_1m_in_cached": 0.05,
 499      "cost_per_1m_out_cached": 0,
 500      "context_window": 128000,
 501      "default_max_tokens": 8000,
 502      "can_reason": true,
 503      "reasoning_levels": [
 504        "low",
 505        "medium",
 506        "high"
 507      ],
 508      "default_reasoning_effort": "medium",
 509      "supports_attachments": true,
 510      "options": {}
 511    },
 512    {
 513      "id": "zai/glm-4.6v-flash",
 514      "name": "GLM-4.6V-Flash",
 515      "cost_per_1m_in": 0,
 516      "cost_per_1m_out": 0,
 517      "cost_per_1m_in_cached": 0,
 518      "cost_per_1m_out_cached": 0,
 519      "context_window": 128000,
 520      "default_max_tokens": 8000,
 521      "can_reason": true,
 522      "reasoning_levels": [
 523        "low",
 524        "medium",
 525        "high"
 526      ],
 527      "default_reasoning_effort": "medium",
 528      "supports_attachments": true,
 529      "options": {}
 530    },
 531    {
 532      "id": "zai/glm-5",
 533      "name": "GLM-5",
 534      "cost_per_1m_in": 1,
  535      "cost_per_1m_out": 3.2,
  536      "cost_per_1m_in_cached": 0.2,
 537      "cost_per_1m_out_cached": 0,
 538      "context_window": 202800,
 539      "default_max_tokens": 8000,
 540      "can_reason": true,
 541      "reasoning_levels": [
 542        "low",
 543        "medium",
 544        "high"
 545      ],
 546      "default_reasoning_effort": "medium",
 547      "supports_attachments": false,
 548      "options": {}
 549    },
 550    {
 551      "id": "openai/gpt-5.1-codex-max",
 552      "name": "GPT 5.1 Codex Max",
 553      "cost_per_1m_in": 1.25,
 554      "cost_per_1m_out": 10,
 555      "cost_per_1m_in_cached": 0.125,
 556      "cost_per_1m_out_cached": 0,
 557      "context_window": 400000,
 558      "default_max_tokens": 8000,
 559      "can_reason": true,
 560      "reasoning_levels": [
 561        "low",
 562        "medium",
 563        "high"
 564      ],
 565      "default_reasoning_effort": "medium",
 566      "supports_attachments": true,
 567      "options": {}
 568    },
 569    {
 570      "id": "openai/gpt-5.1-thinking",
 571      "name": "GPT 5.1 Thinking",
 572      "cost_per_1m_in": 1.25,
 573      "cost_per_1m_out": 10,
 574      "cost_per_1m_in_cached": 0.13,
 575      "cost_per_1m_out_cached": 0,
 576      "context_window": 400000,
 577      "default_max_tokens": 8000,
 578      "can_reason": true,
 579      "reasoning_levels": [
 580        "low",
 581        "medium",
 582        "high"
 583      ],
 584      "default_reasoning_effort": "medium",
 585      "supports_attachments": true,
 586      "options": {}
 587    },
 588    {
 589      "id": "openai/gpt-5.2",
 590      "name": "GPT 5.2",
 591      "cost_per_1m_in": 1.75,
 592      "cost_per_1m_out": 14,
 593      "cost_per_1m_in_cached": 0.18,
 594      "cost_per_1m_out_cached": 0,
 595      "context_window": 400000,
 596      "default_max_tokens": 8000,
 597      "can_reason": true,
 598      "reasoning_levels": [
 599        "low",
 600        "medium",
 601        "high"
 602      ],
 603      "default_reasoning_effort": "medium",
 604      "supports_attachments": true,
 605      "options": {}
 606    },
 607    {
 608      "id": "openai/gpt-5.2-pro",
  609      "name": "GPT 5.2 Pro",
 610      "cost_per_1m_in": 21,
 611      "cost_per_1m_out": 168,
 612      "cost_per_1m_in_cached": 0,
 613      "cost_per_1m_out_cached": 0,
 614      "context_window": 400000,
 615      "default_max_tokens": 8000,
 616      "can_reason": true,
 617      "reasoning_levels": [
 618        "low",
 619        "medium",
 620        "high"
 621      ],
 622      "default_reasoning_effort": "medium",
 623      "supports_attachments": true,
 624      "options": {}
 625    },
 626    {
 627      "id": "openai/gpt-4-turbo",
 628      "name": "GPT-4 Turbo",
 629      "cost_per_1m_in": 10,
 630      "cost_per_1m_out": 30,
 631      "cost_per_1m_in_cached": 0,
 632      "cost_per_1m_out_cached": 0,
 633      "context_window": 128000,
 634      "default_max_tokens": 4096,
 635      "can_reason": false,
 636      "supports_attachments": true,
 637      "options": {}
 638    },
 639    {
 640      "id": "openai/gpt-4.1",
 641      "name": "GPT-4.1",
 642      "cost_per_1m_in": 2,
 643      "cost_per_1m_out": 8,
 644      "cost_per_1m_in_cached": 0.5,
 645      "cost_per_1m_out_cached": 0,
 646      "context_window": 1047576,
 647      "default_max_tokens": 8000,
 648      "can_reason": false,
 649      "supports_attachments": true,
 650      "options": {}
 651    },
 652    {
 653      "id": "openai/gpt-4.1-mini",
 654      "name": "GPT-4.1 mini",
  655      "cost_per_1m_in": 0.4,
  656      "cost_per_1m_out": 1.6,
  657      "cost_per_1m_in_cached": 0.1,
 658      "cost_per_1m_out_cached": 0,
 659      "context_window": 1047576,
 660      "default_max_tokens": 8000,
 661      "can_reason": false,
 662      "supports_attachments": true,
 663      "options": {}
 664    },
 665    {
 666      "id": "openai/gpt-4.1-nano",
 667      "name": "GPT-4.1 nano",
  668      "cost_per_1m_in": 0.1,
  669      "cost_per_1m_out": 0.4,
 670      "cost_per_1m_in_cached": 0.03,
 671      "cost_per_1m_out_cached": 0,
 672      "context_window": 1047576,
 673      "default_max_tokens": 8000,
 674      "can_reason": false,
 675      "supports_attachments": true,
 676      "options": {}
 677    },
 678    {
 679      "id": "openai/gpt-4o",
 680      "name": "GPT-4o",
 681      "cost_per_1m_in": 2.5,
 682      "cost_per_1m_out": 10,
 683      "cost_per_1m_in_cached": 1.25,
 684      "cost_per_1m_out_cached": 0,
 685      "context_window": 128000,
 686      "default_max_tokens": 8000,
 687      "can_reason": false,
 688      "supports_attachments": true,
 689      "options": {}
 690    },
 691    {
 692      "id": "openai/gpt-4o-mini",
 693      "name": "GPT-4o mini",
 694      "cost_per_1m_in": 0.15,
 695      "cost_per_1m_out": 0.6,
 696      "cost_per_1m_in_cached": 0.075,
 697      "cost_per_1m_out_cached": 0,
 698      "context_window": 128000,
 699      "default_max_tokens": 8000,
 700      "can_reason": false,
 701      "supports_attachments": true,
 702      "options": {}
 703    },
 704    {
 705      "id": "openai/gpt-5",
 706      "name": "GPT-5",
 707      "cost_per_1m_in": 1.25,
 708      "cost_per_1m_out": 10,
 709      "cost_per_1m_in_cached": 0.13,
 710      "cost_per_1m_out_cached": 0,
 711      "context_window": 400000,
 712      "default_max_tokens": 8000,
 713      "can_reason": true,
 714      "reasoning_levels": [
 715        "low",
 716        "medium",
 717        "high"
 718      ],
 719      "default_reasoning_effort": "medium",
 720      "supports_attachments": true,
 721      "options": {}
 722    },
 723    {
 724      "id": "openai/gpt-5-chat",
 725      "name": "GPT-5 Chat",
 726      "cost_per_1m_in": 1.25,
 727      "cost_per_1m_out": 10,
 728      "cost_per_1m_in_cached": 0.125,
 729      "cost_per_1m_out_cached": 0,
 730      "context_window": 128000,
 731      "default_max_tokens": 8000,
 732      "can_reason": true,
 733      "reasoning_levels": [
 734        "low",
 735        "medium",
 736        "high"
 737      ],
 738      "default_reasoning_effort": "medium",
 739      "supports_attachments": true,
 740      "options": {}
 741    },
 742    {
 743      "id": "openai/gpt-5-mini",
 744      "name": "GPT-5 mini",
 745      "cost_per_1m_in": 0.25,
 746      "cost_per_1m_out": 2,
 747      "cost_per_1m_in_cached": 0.03,
 748      "cost_per_1m_out_cached": 0,
 749      "context_window": 400000,
 750      "default_max_tokens": 8000,
 751      "can_reason": true,
 752      "reasoning_levels": [
 753        "low",
 754        "medium",
 755        "high"
 756      ],
 757      "default_reasoning_effort": "medium",
 758      "supports_attachments": true,
 759      "options": {}
 760    },
 761    {
 762      "id": "openai/gpt-5-nano",
 763      "name": "GPT-5 nano",
  764      "cost_per_1m_in": 0.05,
  765      "cost_per_1m_out": 0.4,
 766      "cost_per_1m_in_cached": 0.01,
 767      "cost_per_1m_out_cached": 0,
 768      "context_window": 400000,
 769      "default_max_tokens": 8000,
 770      "can_reason": true,
 771      "reasoning_levels": [
 772        "low",
 773        "medium",
 774        "high"
 775      ],
 776      "default_reasoning_effort": "medium",
 777      "supports_attachments": true,
 778      "options": {}
 779    },
 780    {
 781      "id": "openai/gpt-5-pro",
 782      "name": "GPT-5 pro",
 783      "cost_per_1m_in": 15,
 784      "cost_per_1m_out": 120,
 785      "cost_per_1m_in_cached": 0,
 786      "cost_per_1m_out_cached": 0,
 787      "context_window": 400000,
 788      "default_max_tokens": 8000,
 789      "can_reason": true,
 790      "reasoning_levels": [
 791        "low",
 792        "medium",
 793        "high"
 794      ],
 795      "default_reasoning_effort": "medium",
 796      "supports_attachments": true,
 797      "options": {}
 798    },
 799    {
 800      "id": "openai/gpt-5-codex",
 801      "name": "GPT-5-Codex",
 802      "cost_per_1m_in": 1.25,
 803      "cost_per_1m_out": 10,
 804      "cost_per_1m_in_cached": 0.13,
 805      "cost_per_1m_out_cached": 0,
 806      "context_window": 400000,
 807      "default_max_tokens": 8000,
 808      "can_reason": true,
 809      "reasoning_levels": [
 810        "low",
 811        "medium",
 812        "high"
 813      ],
 814      "default_reasoning_effort": "medium",
 815      "supports_attachments": true,
 816      "options": {}
 817    },
 818    {
 819      "id": "openai/gpt-5.1-codex-mini",
 820      "name": "GPT-5.1 Codex mini",
 821      "cost_per_1m_in": 0.25,
 822      "cost_per_1m_out": 2,
  823      "cost_per_1m_in_cached": 0.025,
 824      "cost_per_1m_out_cached": 0,
 825      "context_window": 400000,
 826      "default_max_tokens": 8000,
 827      "can_reason": true,
 828      "reasoning_levels": [
 829        "low",
 830        "medium",
 831        "high"
 832      ],
 833      "default_reasoning_effort": "medium",
 834      "supports_attachments": true,
 835      "options": {}
 836    },
 837    {
 838      "id": "openai/gpt-5.1-instant",
 839      "name": "GPT-5.1 Instant",
 840      "cost_per_1m_in": 1.25,
 841      "cost_per_1m_out": 10,
 842      "cost_per_1m_in_cached": 0.13,
 843      "cost_per_1m_out_cached": 0,
 844      "context_window": 128000,
 845      "default_max_tokens": 8000,
 846      "can_reason": true,
 847      "reasoning_levels": [
 848        "low",
 849        "medium",
 850        "high"
 851      ],
 852      "default_reasoning_effort": "medium",
 853      "supports_attachments": true,
 854      "options": {}
 855    },
 856    {
 857      "id": "openai/gpt-5.1-codex",
 858      "name": "GPT-5.1-Codex",
 859      "cost_per_1m_in": 1.25,
 860      "cost_per_1m_out": 10,
 861      "cost_per_1m_in_cached": 0.125,
 862      "cost_per_1m_out_cached": 0,
 863      "context_window": 400000,
 864      "default_max_tokens": 8000,
 865      "can_reason": true,
 866      "reasoning_levels": [
 867        "low",
 868        "medium",
 869        "high"
 870      ],
 871      "default_reasoning_effort": "medium",
 872      "supports_attachments": true,
 873      "options": {}
 874    },
 875    {
 876      "id": "openai/gpt-5.2-chat",
 877      "name": "GPT-5.2 Chat",
 878      "cost_per_1m_in": 1.75,
 879      "cost_per_1m_out": 14,
 880      "cost_per_1m_in_cached": 0.175,
 881      "cost_per_1m_out_cached": 0,
 882      "context_window": 128000,
 883      "default_max_tokens": 8000,
 884      "can_reason": true,
 885      "reasoning_levels": [
 886        "low",
 887        "medium",
 888        "high"
 889      ],
 890      "default_reasoning_effort": "medium",
 891      "supports_attachments": true,
 892      "options": {}
 893    },
 894    {
 895      "id": "openai/gpt-5.2-codex",
 896      "name": "GPT-5.2-Codex",
 897      "cost_per_1m_in": 1.75,
 898      "cost_per_1m_out": 14,
 899      "cost_per_1m_in_cached": 0.175,
 900      "cost_per_1m_out_cached": 0,
 901      "context_window": 400000,
 902      "default_max_tokens": 8000,
 903      "can_reason": true,
 904      "reasoning_levels": [
 905        "low",
 906        "medium",
 907        "high"
 908      ],
 909      "default_reasoning_effort": "medium",
 910      "supports_attachments": true,
 911      "options": {}
 912    },
 913    {
 914      "id": "google/gemini-2.5-flash",
 915      "name": "Gemini 2.5 Flash",
 916      "cost_per_1m_in": 0.3,
 917      "cost_per_1m_out": 2.5,
 918      "cost_per_1m_in_cached": 0,
 919      "cost_per_1m_out_cached": 0,
 920      "context_window": 1000000,
 921      "default_max_tokens": 8000,
 922      "can_reason": true,
 923      "reasoning_levels": [
 924        "low",
 925        "medium",
 926        "high"
 927      ],
 928      "default_reasoning_effort": "medium",
 929      "supports_attachments": false,
 930      "options": {}
 931    },
 932    {
 933      "id": "google/gemini-2.5-flash-lite",
 934      "name": "Gemini 2.5 Flash Lite",
  935      "cost_per_1m_in": 0.1,
  936      "cost_per_1m_out": 0.4,
 937      "cost_per_1m_in_cached": 0.01,
 938      "cost_per_1m_out_cached": 0,
 939      "context_window": 1048576,
 940      "default_max_tokens": 8000,
 941      "can_reason": true,
 942      "reasoning_levels": [
 943        "low",
 944        "medium",
 945        "high"
 946      ],
 947      "default_reasoning_effort": "medium",
 948      "supports_attachments": true,
 949      "options": {}
 950    },
 951    {
 952      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 953      "name": "Gemini 2.5 Flash Lite Preview 09-2025",
  954      "cost_per_1m_in": 0.1,
  955      "cost_per_1m_out": 0.4,
 956      "cost_per_1m_in_cached": 0.01,
 957      "cost_per_1m_out_cached": 0,
 958      "context_window": 1048576,
 959      "default_max_tokens": 8000,
 960      "can_reason": true,
 961      "reasoning_levels": [
 962        "low",
 963        "medium",
 964        "high"
 965      ],
 966      "default_reasoning_effort": "medium",
 967      "supports_attachments": true,
 968      "options": {}
 969    },
 970    {
 971      "id": "google/gemini-2.5-flash-preview-09-2025",
 972      "name": "Gemini 2.5 Flash Preview 09-2025",
 973      "cost_per_1m_in": 0.3,
 974      "cost_per_1m_out": 2.5,
 975      "cost_per_1m_in_cached": 0.03,
 976      "cost_per_1m_out_cached": 0,
 977      "context_window": 1000000,
 978      "default_max_tokens": 8000,
 979      "can_reason": true,
 980      "reasoning_levels": [
 981        "low",
 982        "medium",
 983        "high"
 984      ],
 985      "default_reasoning_effort": "medium",
 986      "supports_attachments": true,
 987      "options": {}
 988    },
 989    {
 990      "id": "google/gemini-2.5-pro",
 991      "name": "Gemini 2.5 Pro",
 992      "cost_per_1m_in": 1.25,
 993      "cost_per_1m_out": 10,
 994      "cost_per_1m_in_cached": 0,
 995      "cost_per_1m_out_cached": 0,
 996      "context_window": 1048576,
 997      "default_max_tokens": 8000,
 998      "can_reason": true,
 999      "reasoning_levels": [
1000        "low",
1001        "medium",
1002        "high"
1003      ],
1004      "default_reasoning_effort": "medium",
1005      "supports_attachments": false,
1006      "options": {}
1007    },
1008    {
1009      "id": "google/gemini-3-flash",
1010      "name": "Gemini 3 Flash",
1011      "cost_per_1m_in": 0.5,
1012      "cost_per_1m_out": 3,
 1013      "cost_per_1m_in_cached": 0.05,
1014      "cost_per_1m_out_cached": 0,
1015      "context_window": 1000000,
1016      "default_max_tokens": 8000,
1017      "can_reason": true,
1018      "reasoning_levels": [
1019        "low",
1020        "medium",
1021        "high"
1022      ],
1023      "default_reasoning_effort": "medium",
1024      "supports_attachments": true,
1025      "options": {}
1026    },
1027    {
1028      "id": "google/gemini-3-pro-preview",
1029      "name": "Gemini 3 Pro Preview",
1030      "cost_per_1m_in": 2,
1031      "cost_per_1m_out": 12,
 1032      "cost_per_1m_in_cached": 0.2,
1033      "cost_per_1m_out_cached": 0,
1034      "context_window": 1000000,
1035      "default_max_tokens": 8000,
1036      "can_reason": true,
1037      "reasoning_levels": [
1038        "low",
1039        "medium",
1040        "high"
1041      ],
1042      "default_reasoning_effort": "medium",
1043      "supports_attachments": true,
1044      "options": {}
1045    },
1046    {
1047      "id": "xai/grok-2-vision",
1048      "name": "Grok 2 Vision",
1049      "cost_per_1m_in": 2,
1050      "cost_per_1m_out": 10,
1051      "cost_per_1m_in_cached": 0,
1052      "cost_per_1m_out_cached": 0,
1053      "context_window": 32768,
1054      "default_max_tokens": 8000,
1055      "can_reason": false,
1056      "supports_attachments": true,
1057      "options": {}
1058    },
1059    {
1060      "id": "xai/grok-3",
1061      "name": "Grok 3 Beta",
1062      "cost_per_1m_in": 3,
1063      "cost_per_1m_out": 15,
1064      "cost_per_1m_in_cached": 0,
1065      "cost_per_1m_out_cached": 0,
1066      "context_window": 131072,
1067      "default_max_tokens": 8000,
1068      "can_reason": false,
1069      "supports_attachments": false,
1070      "options": {}
1071    },
1072    {
1073      "id": "xai/grok-3-fast",
1074      "name": "Grok 3 Fast Beta",
1075      "cost_per_1m_in": 5,
1076      "cost_per_1m_out": 25,
1077      "cost_per_1m_in_cached": 0,
1078      "cost_per_1m_out_cached": 0,
1079      "context_window": 131072,
1080      "default_max_tokens": 8000,
1081      "can_reason": false,
1082      "supports_attachments": false,
1083      "options": {}
1084    },
1085    {
1086      "id": "xai/grok-3-mini",
1087      "name": "Grok 3 Mini Beta",
1088      "cost_per_1m_in": 0.3,
1089      "cost_per_1m_out": 0.5,
1090      "cost_per_1m_in_cached": 0,
1091      "cost_per_1m_out_cached": 0,
1092      "context_window": 131072,
1093      "default_max_tokens": 8000,
1094      "can_reason": false,
1095      "supports_attachments": false,
1096      "options": {}
1097    },
1098    {
1099      "id": "xai/grok-3-mini-fast",
1100      "name": "Grok 3 Mini Fast Beta",
1101      "cost_per_1m_in": 0.6,
1102      "cost_per_1m_out": 4,
1103      "cost_per_1m_in_cached": 0,
1104      "cost_per_1m_out_cached": 0,
1105      "context_window": 131072,
1106      "default_max_tokens": 8000,
1107      "can_reason": false,
1108      "supports_attachments": false,
1109      "options": {}
1110    },
1111    {
1112      "id": "xai/grok-4",
1113      "name": "Grok 4",
1114      "cost_per_1m_in": 3,
1115      "cost_per_1m_out": 15,
1116      "cost_per_1m_in_cached": 0,
1117      "cost_per_1m_out_cached": 0,
1118      "context_window": 256000,
1119      "default_max_tokens": 8000,
1120      "can_reason": true,
1121      "reasoning_levels": [
1122        "low",
1123        "medium",
1124        "high"
1125      ],
1126      "default_reasoning_effort": "medium",
1127      "supports_attachments": true,
1128      "options": {}
1129    },
1130    {
1131      "id": "xai/grok-4-fast-non-reasoning",
1132      "name": "Grok 4 Fast Non-Reasoning",
 1133      "cost_per_1m_in": 0.2,
1134      "cost_per_1m_out": 0.5,
 1135      "cost_per_1m_in_cached": 0.05,
1136      "cost_per_1m_out_cached": 0,
1137      "context_window": 2000000,
1138      "default_max_tokens": 8000,
1139      "can_reason": false,
1140      "supports_attachments": false,
1141      "options": {}
1142    },
1143    {
1144      "id": "xai/grok-4-fast-reasoning",
1145      "name": "Grok 4 Fast Reasoning",
 1146      "cost_per_1m_in": 0.2,
1147      "cost_per_1m_out": 0.5,
 1148      "cost_per_1m_in_cached": 0.05,
1149      "cost_per_1m_out_cached": 0,
1150      "context_window": 2000000,
1151      "default_max_tokens": 8000,
1152      "can_reason": true,
1153      "reasoning_levels": [
1154        "low",
1155        "medium",
1156        "high"
1157      ],
1158      "default_reasoning_effort": "medium",
1159      "supports_attachments": false,
1160      "options": {}
1161    },
1162    {
1163      "id": "xai/grok-4.1-fast-non-reasoning",
1164      "name": "Grok 4.1 Fast Non-Reasoning",
 1165      "cost_per_1m_in": 0.2,
1166      "cost_per_1m_out": 0.5,
 1167      "cost_per_1m_in_cached": 0.05,
1168      "cost_per_1m_out_cached": 0,
1169      "context_window": 2000000,
1170      "default_max_tokens": 8000,
1171      "can_reason": false,
1172      "supports_attachments": false,
1173      "options": {}
1174    },
1175    {
1176      "id": "xai/grok-4.1-fast-reasoning",
1177      "name": "Grok 4.1 Fast Reasoning",
 1178      "cost_per_1m_in": 0.2,
1179      "cost_per_1m_out": 0.5,
 1180      "cost_per_1m_in_cached": 0.05,
1181      "cost_per_1m_out_cached": 0,
1182      "context_window": 2000000,
1183      "default_max_tokens": 8000,
1184      "can_reason": true,
1185      "reasoning_levels": [
1186        "low",
1187        "medium",
1188        "high"
1189      ],
1190      "default_reasoning_effort": "medium",
1191      "supports_attachments": false,
1192      "options": {}
1193    },
1194    {
1195      "id": "xai/grok-code-fast-1",
1196      "name": "Grok Code Fast 1",
 1197      "cost_per_1m_in": 0.2,
1198      "cost_per_1m_out": 1.5,
1199      "cost_per_1m_in_cached": 0.02,
1200      "cost_per_1m_out_cached": 0,
1201      "context_window": 256000,
1202      "default_max_tokens": 8000,
1203      "can_reason": true,
1204      "reasoning_levels": [
1205        "low",
1206        "medium",
1207        "high"
1208      ],
1209      "default_reasoning_effort": "medium",
1210      "supports_attachments": false,
1211      "options": {}
1212    },
1213    {
1214      "id": "prime-intellect/intellect-3",
1215      "name": "INTELLECT 3",
 1216      "cost_per_1m_in": 0.2,
1217      "cost_per_1m_out": 1.1,
1218      "cost_per_1m_in_cached": 0,
1219      "cost_per_1m_out_cached": 0,
1220      "context_window": 131072,
1221      "default_max_tokens": 8000,
1222      "can_reason": true,
1223      "reasoning_levels": [
1224        "low",
1225        "medium",
1226        "high"
1227      ],
1228      "default_reasoning_effort": "medium",
1229      "supports_attachments": false,
1230      "options": {}
1231    },
1232    {
1233      "id": "moonshotai/kimi-k2",
1234      "name": "Kimi K2",
1235      "cost_per_1m_in": 0.5,
1236      "cost_per_1m_out": 2,
1237      "cost_per_1m_in_cached": 0,
1238      "cost_per_1m_out_cached": 0,
1239      "context_window": 131072,
1240      "default_max_tokens": 8000,
1241      "can_reason": false,
1242      "supports_attachments": false,
1243      "options": {}
1244    },
1245    {
1246      "id": "moonshotai/kimi-k2-thinking",
1247      "name": "Kimi K2 Thinking",
1248      "cost_per_1m_in": 0.47,
1249      "cost_per_1m_out": 2,
 1250      "cost_per_1m_in_cached": 0.141,
1251      "cost_per_1m_out_cached": 0,
1252      "context_window": 216144,
1253      "default_max_tokens": 8000,
1254      "can_reason": true,
1255      "reasoning_levels": [
1256        "low",
1257        "medium",
1258        "high"
1259      ],
1260      "default_reasoning_effort": "medium",
1261      "supports_attachments": false,
1262      "options": {}
1263    },
1264    {
1265      "id": "moonshotai/kimi-k2-thinking-turbo",
1266      "name": "Kimi K2 Thinking Turbo",
1267      "cost_per_1m_in": 1.15,
1268      "cost_per_1m_out": 8,
1269      "cost_per_1m_in_cached": 0.15,
1270      "cost_per_1m_out_cached": 0,
1271      "context_window": 262114,
1272      "default_max_tokens": 8000,
1273      "can_reason": true,
1274      "reasoning_levels": [
1275        "low",
1276        "medium",
1277        "high"
1278      ],
1279      "default_reasoning_effort": "medium",
1280      "supports_attachments": false,
1281      "options": {}
1282    },
1283    {
1284      "id": "moonshotai/kimi-k2-turbo",
1285      "name": "Kimi K2 Turbo",
1286      "cost_per_1m_in": 2.4,
1287      "cost_per_1m_out": 10,
1288      "cost_per_1m_in_cached": 0,
1289      "cost_per_1m_out_cached": 0,
1290      "context_window": 256000,
1291      "default_max_tokens": 8000,
1292      "can_reason": false,
1293      "supports_attachments": false,
1294      "options": {}
1295    },
1296    {
1297      "id": "moonshotai/kimi-k2.5",
1298      "name": "Kimi K2.5",
1299      "cost_per_1m_in": 0.5,
1300      "cost_per_1m_out": 2.8,
1301      "cost_per_1m_in_cached": 0,
1302      "cost_per_1m_out_cached": 0,
1303      "context_window": 256000,
1304      "default_max_tokens": 8000,
1305      "can_reason": true,
1306      "reasoning_levels": [
1307        "low",
1308        "medium",
1309        "high"
1310      ],
1311      "default_reasoning_effort": "medium",
1312      "supports_attachments": true,
1313      "options": {}
1314    },
1315    {
1316      "id": "meta/llama-3.1-70b",
1317      "name": "Llama 3.1 70B Instruct",
 1318      "cost_per_1m_in": 0.4,
 1319      "cost_per_1m_out": 0.4,
1320      "cost_per_1m_in_cached": 0,
1321      "cost_per_1m_out_cached": 0,
1322      "context_window": 131072,
1323      "default_max_tokens": 8000,
1324      "can_reason": false,
1325      "supports_attachments": false,
1326      "options": {}
1327    },
1328    {
1329      "id": "meta/llama-3.1-8b",
1330      "name": "Llama 3.1 8B Instruct",
1331      "cost_per_1m_in": 0.03,
 1332      "cost_per_1m_out": 0.05,
1333      "cost_per_1m_in_cached": 0,
1334      "cost_per_1m_out_cached": 0,
1335      "context_window": 131072,
1336      "default_max_tokens": 8000,
1337      "can_reason": false,
1338      "supports_attachments": false,
1339      "options": {}
1340    },
1341    {
1342      "id": "meta/llama-3.2-11b",
1343      "name": "Llama 3.2 11B Vision Instruct",
1344      "cost_per_1m_in": 0.16,
1345      "cost_per_1m_out": 0.16,
1346      "cost_per_1m_in_cached": 0,
1347      "cost_per_1m_out_cached": 0,
1348      "context_window": 128000,
1349      "default_max_tokens": 8000,
1350      "can_reason": false,
1351      "supports_attachments": true,
1352      "options": {}
1353    },
1354    {
1355      "id": "meta/llama-3.2-90b",
1356      "name": "Llama 3.2 90B Vision Instruct",
1357      "cost_per_1m_in": 0.72,
1358      "cost_per_1m_out": 0.72,
1359      "cost_per_1m_in_cached": 0,
1360      "cost_per_1m_out_cached": 0,
1361      "context_window": 128000,
1362      "default_max_tokens": 8000,
1363      "can_reason": false,
1364      "supports_attachments": true,
1365      "options": {}
1366    },
1367    {
1368      "id": "meta/llama-3.3-70b",
1369      "name": "Llama 3.3 70B Instruct",
1370      "cost_per_1m_in": 0.72,
1371      "cost_per_1m_out": 0.72,
1372      "cost_per_1m_in_cached": 0,
1373      "cost_per_1m_out_cached": 0,
1374      "context_window": 128000,
1375      "default_max_tokens": 8000,
1376      "can_reason": false,
1377      "supports_attachments": false,
1378      "options": {}
1379    },
1380    {
1381      "id": "meta/llama-4-maverick",
1382      "name": "Llama 4 Maverick 17B Instruct",
1383      "cost_per_1m_in": 0.15,
1384      "cost_per_1m_out": 0.6,
1385      "cost_per_1m_in_cached": 0,
1386      "cost_per_1m_out_cached": 0,
1387      "context_window": 131072,
1388      "default_max_tokens": 8000,
1389      "can_reason": false,
1390      "supports_attachments": true,
1391      "options": {}
1392    },
1393    {
1394      "id": "meta/llama-4-scout",
1395      "name": "Llama 4 Scout 17B Instruct",
1396      "cost_per_1m_in": 0.08,
1397      "cost_per_1m_out": 0.3,
1398      "cost_per_1m_in_cached": 0,
1399      "cost_per_1m_out_cached": 0,
1400      "context_window": 131072,
1401      "default_max_tokens": 8000,
1402      "can_reason": false,
1403      "supports_attachments": true,
1404      "options": {}
1405    },
1406    {
1407      "id": "meituan/longcat-flash-chat",
1408      "name": "LongCat Flash Chat",
1409      "cost_per_1m_in": 0,
1410      "cost_per_1m_out": 0,
1411      "cost_per_1m_in_cached": 0,
1412      "cost_per_1m_out_cached": 0,
1413      "context_window": 128000,
1414      "default_max_tokens": 8000,
1415      "can_reason": false,
1416      "supports_attachments": false,
1417      "options": {}
1418    },
1419    {
1420      "id": "meituan/longcat-flash-thinking",
1421      "name": "LongCat Flash Thinking",
1422      "cost_per_1m_in": 0.15,
1423      "cost_per_1m_out": 1.5,
1424      "cost_per_1m_in_cached": 0,
1425      "cost_per_1m_out_cached": 0,
1426      "context_window": 128000,
1427      "default_max_tokens": 8000,
1428      "can_reason": true,
1429      "reasoning_levels": [
1430        "low",
1431        "medium",
1432        "high"
1433      ],
1434      "default_reasoning_effort": "medium",
1435      "supports_attachments": false,
1436      "options": {}
1437    },
1438    {
1439      "id": "inception/mercury-coder-small",
1440      "name": "Mercury Coder Small Beta",
1441      "cost_per_1m_in": 0.25,
1442      "cost_per_1m_out": 1,
1443      "cost_per_1m_in_cached": 0,
1444      "cost_per_1m_out_cached": 0,
1445      "context_window": 32000,
1446      "default_max_tokens": 8000,
1447      "can_reason": false,
1448      "supports_attachments": false,
1449      "options": {}
1450    },
1451    {
1452      "id": "xiaomi/mimo-v2-flash",
1453      "name": "MiMo V2 Flash",
1454      "cost_per_1m_in": 0.09,
1455      "cost_per_1m_out": 0.29,
1456      "cost_per_1m_in_cached": 0,
1457      "cost_per_1m_out_cached": 0,
1458      "context_window": 262144,
1459      "default_max_tokens": 8000,
1460      "can_reason": true,
1461      "reasoning_levels": [
1462        "low",
1463        "medium",
1464        "high"
1465      ],
1466      "default_reasoning_effort": "medium",
1467      "supports_attachments": false,
1468      "options": {}
1469    },
1470    {
1471      "id": "minimax/minimax-m2",
1472      "name": "MiniMax M2",
1473      "cost_per_1m_in": 0.3,
1474      "cost_per_1m_out": 1.2,
1475      "cost_per_1m_in_cached": 0.03,
1476      "cost_per_1m_out_cached": 0.375,
1477      "context_window": 205000,
1478      "default_max_tokens": 8000,
1479      "can_reason": true,
1480      "reasoning_levels": [
1481        "low",
1482        "medium",
1483        "high"
1484      ],
1485      "default_reasoning_effort": "medium",
1486      "supports_attachments": false,
1487      "options": {}
1488    },
1489    {
1490      "id": "minimax/minimax-m2.1",
1491      "name": "MiniMax M2.1",
1492      "cost_per_1m_in": 0.3,
1493      "cost_per_1m_out": 1.2,
1494      "cost_per_1m_in_cached": 0.15,
1495      "cost_per_1m_out_cached": 0,
1496      "context_window": 204800,
1497      "default_max_tokens": 8000,
1498      "can_reason": true,
1499      "reasoning_levels": [
1500        "low",
1501        "medium",
1502        "high"
1503      ],
1504      "default_reasoning_effort": "medium",
1505      "supports_attachments": false,
1506      "options": {}
1507    },
1508    {
1509      "id": "minimax/minimax-m2.1-lightning",
1510      "name": "MiniMax M2.1 Lightning",
1511      "cost_per_1m_in": 0.3,
1512      "cost_per_1m_out": 2.4,
1513      "cost_per_1m_in_cached": 0.03,
1514      "cost_per_1m_out_cached": 0.375,
1515      "context_window": 204800,
1516      "default_max_tokens": 8000,
1517      "can_reason": true,
1518      "reasoning_levels": [
1519        "low",
1520        "medium",
1521        "high"
1522      ],
1523      "default_reasoning_effort": "medium",
1524      "supports_attachments": false,
1525      "options": {}
1526    },
1527    {
1528      "id": "mistral/ministral-3b",
1529      "name": "Ministral 3B",
1530      "cost_per_1m_in": 0.04,
1531      "cost_per_1m_out": 0.04,
1532      "cost_per_1m_in_cached": 0,
1533      "cost_per_1m_out_cached": 0,
1534      "context_window": 128000,
1535      "default_max_tokens": 4000,
1536      "can_reason": false,
1537      "supports_attachments": false,
1538      "options": {}
1539    },
1540    {
1541      "id": "mistral/ministral-8b",
1542      "name": "Ministral 8B",
 1543      "cost_per_1m_in": 0.1,
 1544      "cost_per_1m_out": 0.1,
1545      "cost_per_1m_in_cached": 0,
1546      "cost_per_1m_out_cached": 0,
1547      "context_window": 128000,
1548      "default_max_tokens": 4000,
1549      "can_reason": false,
1550      "supports_attachments": false,
1551      "options": {}
1552    },
1553    {
1554      "id": "mistral/codestral",
1555      "name": "Mistral Codestral",
1556      "cost_per_1m_in": 0.3,
 1557      "cost_per_1m_out": 0.9,
1558      "cost_per_1m_in_cached": 0,
1559      "cost_per_1m_out_cached": 0,
1560      "context_window": 128000,
1561      "default_max_tokens": 4000,
1562      "can_reason": false,
1563      "supports_attachments": false,
1564      "options": {}
1565    },
1566    {
1567      "id": "mistral/mistral-medium",
1568      "name": "Mistral Medium 3.1",
 1569      "cost_per_1m_in": 0.4,
1570      "cost_per_1m_out": 2,
1571      "cost_per_1m_in_cached": 0,
1572      "cost_per_1m_out_cached": 0,
1573      "context_window": 128000,
1574      "default_max_tokens": 8000,
1575      "can_reason": false,
1576      "supports_attachments": true,
1577      "options": {}
1578    },
1579    {
1580      "id": "mistral/mistral-small",
1581      "name": "Mistral Small",
 1582      "cost_per_1m_in": 0.1,
1583      "cost_per_1m_out": 0.3,
1584      "cost_per_1m_in_cached": 0,
1585      "cost_per_1m_out_cached": 0,
1586      "context_window": 32000,
1587      "default_max_tokens": 4000,
1588      "can_reason": false,
1589      "supports_attachments": true,
1590      "options": {}
1591    },
1592    {
1593      "id": "nvidia/nemotron-nano-12b-v2-vl",
1594      "name": "Nvidia Nemotron Nano 12B V2 VL",
 1595      "cost_per_1m_in": 0.2,
1596      "cost_per_1m_out": 0.6,
1597      "cost_per_1m_in_cached": 0,
1598      "cost_per_1m_out_cached": 0,
1599      "context_window": 131072,
1600      "default_max_tokens": 8000,
1601      "can_reason": true,
1602      "reasoning_levels": [
1603        "low",
1604        "medium",
1605        "high"
1606      ],
1607      "default_reasoning_effort": "medium",
1608      "supports_attachments": true,
1609      "options": {}
1610    },
1611    {
1612      "id": "nvidia/nemotron-nano-9b-v2",
1613      "name": "Nvidia Nemotron Nano 9B V2",
1614      "cost_per_1m_in": 0.04,
1615      "cost_per_1m_out": 0.16,
1616      "cost_per_1m_in_cached": 0,
1617      "cost_per_1m_out_cached": 0,
1618      "context_window": 131072,
1619      "default_max_tokens": 8000,
1620      "can_reason": true,
1621      "reasoning_levels": [
1622        "low",
1623        "medium",
1624        "high"
1625      ],
1626      "default_reasoning_effort": "medium",
1627      "supports_attachments": false,
1628      "options": {}
1629    },
1630    {
1631      "id": "mistral/pixtral-12b",
1632      "name": "Pixtral 12B 2409",
1633      "cost_per_1m_in": 0.15,
1634      "cost_per_1m_out": 0.15,
1635      "cost_per_1m_in_cached": 0,
1636      "cost_per_1m_out_cached": 0,
1637      "context_window": 128000,
1638      "default_max_tokens": 4000,
1639      "can_reason": false,
1640      "supports_attachments": true,
1641      "options": {}
1642    },
1643    {
1644      "id": "mistral/pixtral-large",
1645      "name": "Pixtral Large",
1646      "cost_per_1m_in": 2,
1647      "cost_per_1m_out": 6,
1648      "cost_per_1m_in_cached": 0,
1649      "cost_per_1m_out_cached": 0,
1650      "context_window": 128000,
1651      "default_max_tokens": 4000,
1652      "can_reason": false,
1653      "supports_attachments": true,
1654      "options": {}
1655    },
1656    {
1657      "id": "alibaba/qwen-3-32b",
1658      "name": "Qwen 3 32B",
 1659      "cost_per_1m_in": 0.1,
1660      "cost_per_1m_out": 0.3,
1661      "cost_per_1m_in_cached": 0,
1662      "cost_per_1m_out_cached": 0,
1663      "context_window": 40960,
1664      "default_max_tokens": 8000,
1665      "can_reason": true,
1666      "reasoning_levels": [
1667        "low",
1668        "medium",
1669        "high"
1670      ],
1671      "default_reasoning_effort": "medium",
1672      "supports_attachments": false,
1673      "options": {}
1674    },
1675    {
1676      "id": "alibaba/qwen3-coder-30b-a3b",
1677      "name": "Qwen 3 Coder 30B A3B Instruct",
1678      "cost_per_1m_in": 0.07,
1679      "cost_per_1m_out": 0.27,
1680      "cost_per_1m_in_cached": 0,
1681      "cost_per_1m_out_cached": 0,
1682      "context_window": 160000,
1683      "default_max_tokens": 8000,
1684      "can_reason": true,
1685      "reasoning_levels": [
1686        "low",
1687        "medium",
1688        "high"
1689      ],
1690      "default_reasoning_effort": "medium",
1691      "supports_attachments": false,
1692      "options": {}
1693    },
1694    {
1695      "id": "alibaba/qwen3-max-thinking",
1696      "name": "Qwen 3 Max Thinking",
1697      "cost_per_1m_in": 1.2,
1698      "cost_per_1m_out": 6,
1699      "cost_per_1m_in_cached": 0.24,
1700      "cost_per_1m_out_cached": 0,
1701      "context_window": 256000,
1702      "default_max_tokens": 8000,
1703      "can_reason": true,
1704      "reasoning_levels": [
1705        "low",
1706        "medium",
1707        "high"
1708      ],
1709      "default_reasoning_effort": "medium",
1710      "supports_attachments": false,
1711      "options": {}
1712    },
1713    {
1714      "id": "alibaba/qwen3-235b-a22b-thinking",
1715      "name": "Qwen3 235B A22B Thinking 2507",
1716      "cost_per_1m_in": 0.3,
 1717      "cost_per_1m_out": 2.9,
1718      "cost_per_1m_in_cached": 0,
1719      "cost_per_1m_out_cached": 0,
1720      "context_window": 262114,
1721      "default_max_tokens": 8000,
1722      "can_reason": true,
1723      "reasoning_levels": [
1724        "low",
1725        "medium",
1726        "high"
1727      ],
1728      "default_reasoning_effort": "medium",
1729      "supports_attachments": true,
1730      "options": {}
1731    },
1732    {
1733      "id": "alibaba/qwen3-coder",
1734      "name": "Qwen3 Coder 480B A35B Instruct",
 1735      "cost_per_1m_in": 0.4,
 1736      "cost_per_1m_out": 1.6,
1737      "cost_per_1m_in_cached": 0,
1738      "cost_per_1m_out_cached": 0,
1739      "context_window": 262144,
1740      "default_max_tokens": 8000,
1741      "can_reason": false,
1742      "supports_attachments": false,
1743      "options": {}
1744    },
1745    {
1746      "id": "alibaba/qwen3-coder-next",
1747      "name": "Qwen3 Coder Next",
1748      "cost_per_1m_in": 0.5,
1749      "cost_per_1m_out": 1.2,
1750      "cost_per_1m_in_cached": 0,
1751      "cost_per_1m_out_cached": 0,
1752      "context_window": 256000,
1753      "default_max_tokens": 8000,
1754      "can_reason": true,
1755      "reasoning_levels": [
1756        "low",
1757        "medium",
1758        "high"
1759      ],
1760      "default_reasoning_effort": "medium",
1761      "supports_attachments": false,
1762      "options": {}
1763    },
1764    {
1765      "id": "alibaba/qwen3-coder-plus",
1766      "name": "Qwen3 Coder Plus",
1767      "cost_per_1m_in": 1,
1768      "cost_per_1m_out": 5,
 1769      "cost_per_1m_in_cached": 0.2,
1770      "cost_per_1m_out_cached": 0,
1771      "context_window": 1000000,
1772      "default_max_tokens": 8000,
1773      "can_reason": false,
1774      "supports_attachments": false,
1775      "options": {}
1776    },
1777    {
1778      "id": "alibaba/qwen3-max-preview",
1779      "name": "Qwen3 Max Preview",
1780      "cost_per_1m_in": 1.2,
1781      "cost_per_1m_out": 6,
1782      "cost_per_1m_in_cached": 0.24,
1783      "cost_per_1m_out_cached": 0,
1784      "context_window": 262144,
1785      "default_max_tokens": 8000,
1786      "can_reason": false,
1787      "supports_attachments": false,
1788      "options": {}
1789    },
1790    {
1791      "id": "alibaba/qwen3-vl-thinking",
1792      "name": "Qwen3 VL 235B A22B Thinking",
1793      "cost_per_1m_in": 0.22,
1794      "cost_per_1m_out": 0.88,
1795      "cost_per_1m_in_cached": 0,
1796      "cost_per_1m_out_cached": 0,
1797      "context_window": 256000,
1798      "default_max_tokens": 8000,
1799      "can_reason": true,
1800      "reasoning_levels": [
1801        "low",
1802        "medium",
1803        "high"
1804      ],
1805      "default_reasoning_effort": "medium",
1806      "supports_attachments": true,
1807      "options": {}
1808    },
1809    {
1810      "id": "alibaba/qwen-3-14b",
1811      "name": "Qwen3-14B",
1812      "cost_per_1m_in": 0.06,
1813      "cost_per_1m_out": 0.24,
1814      "cost_per_1m_in_cached": 0,
1815      "cost_per_1m_out_cached": 0,
1816      "context_window": 40960,
1817      "default_max_tokens": 8000,
1818      "can_reason": true,
1819      "reasoning_levels": [
1820        "low",
1821        "medium",
1822        "high"
1823      ],
1824      "default_reasoning_effort": "medium",
1825      "supports_attachments": false,
1826      "options": {}
1827    },
1828    {
1829      "id": "alibaba/qwen-3-235b",
1830      "name": "Qwen3-235B-A22B",
1831      "cost_per_1m_in": 0.071,
1832      "cost_per_1m_out": 0.463,
1833      "cost_per_1m_in_cached": 0,
1834      "cost_per_1m_out_cached": 0,
1835      "context_window": 40960,
1836      "default_max_tokens": 8000,
1837      "can_reason": false,
1838      "supports_attachments": false,
1839      "options": {}
1840    },
1841    {
1842      "id": "alibaba/qwen-3-30b",
1843      "name": "Qwen3-30B-A3B",
1844      "cost_per_1m_in": 0.08,
1845      "cost_per_1m_out": 0.29,
1846      "cost_per_1m_in_cached": 0,
1847      "cost_per_1m_out_cached": 0,
1848      "context_window": 40960,
1849      "default_max_tokens": 8000,
1850      "can_reason": true,
1851      "reasoning_levels": [
1852        "low",
1853        "medium",
1854        "high"
1855      ],
1856      "default_reasoning_effort": "medium",
1857      "supports_attachments": false,
1858      "options": {}
1859    },
1860    {
1861      "id": "bytedance/seed-1.6",
1862      "name": "Seed 1.6",
1863      "cost_per_1m_in": 0.25,
1864      "cost_per_1m_out": 2,
1865      "cost_per_1m_in_cached": 0.049999999999999996,
1866      "cost_per_1m_out_cached": 0,
1867      "context_window": 256000,
1868      "default_max_tokens": 8000,
1869      "can_reason": true,
1870      "reasoning_levels": [
1871        "low",
1872        "medium",
1873        "high"
1874      ],
1875      "default_reasoning_effort": "medium",
1876      "supports_attachments": false,
1877      "options": {}
1878    },
1879    {
1880      "id": "perplexity/sonar",
1881      "name": "Sonar",
1882      "cost_per_1m_in": 1,
1883      "cost_per_1m_out": 1,
1884      "cost_per_1m_in_cached": 0,
1885      "cost_per_1m_out_cached": 0,
1886      "context_window": 127000,
1887      "default_max_tokens": 8000,
1888      "can_reason": false,
1889      "supports_attachments": true,
1890      "options": {}
1891    },
1892    {
1893      "id": "perplexity/sonar-pro",
1894      "name": "Sonar Pro",
1895      "cost_per_1m_in": 3,
1896      "cost_per_1m_out": 15,
1897      "cost_per_1m_in_cached": 0,
1898      "cost_per_1m_out_cached": 0,
1899      "context_window": 200000,
1900      "default_max_tokens": 8000,
1901      "can_reason": false,
1902      "supports_attachments": true,
1903      "options": {}
1904    },
1905    {
1906      "id": "arcee-ai/trinity-large-preview",
1907      "name": "Trinity Large Preview",
1908      "cost_per_1m_in": 0.25,
1909      "cost_per_1m_out": 1,
1910      "cost_per_1m_in_cached": 0,
1911      "cost_per_1m_out_cached": 0,
1912      "context_window": 131000,
1913      "default_max_tokens": 8000,
1914      "can_reason": false,
1915      "supports_attachments": false,
1916      "options": {}
1917    },
1918    {
1919      "id": "openai/gpt-oss-120b",
1920      "name": "gpt-oss-120b",
1921      "cost_per_1m_in": 0.09999999999999999,
1922      "cost_per_1m_out": 0.5,
1923      "cost_per_1m_in_cached": 0,
1924      "cost_per_1m_out_cached": 0,
1925      "context_window": 131072,
1926      "default_max_tokens": 8000,
1927      "can_reason": true,
1928      "reasoning_levels": [
1929        "low",
1930        "medium",
1931        "high"
1932      ],
1933      "default_reasoning_effort": "medium",
1934      "supports_attachments": false,
1935      "options": {}
1936    },
1937    {
1938      "id": "openai/gpt-oss-20b",
1939      "name": "gpt-oss-20b",
1940      "cost_per_1m_in": 0.07,
1941      "cost_per_1m_out": 0.3,
1942      "cost_per_1m_in_cached": 0,
1943      "cost_per_1m_out_cached": 0,
1944      "context_window": 128000,
1945      "default_max_tokens": 8000,
1946      "can_reason": true,
1947      "reasoning_levels": [
1948        "low",
1949        "medium",
1950        "high"
1951      ],
1952      "default_reasoning_effort": "medium",
1953      "supports_attachments": false,
1954      "options": {}
1955    },
1956    {
1957      "id": "openai/gpt-oss-safeguard-20b",
1958      "name": "gpt-oss-safeguard-20b",
1959      "cost_per_1m_in": 0.075,
1960      "cost_per_1m_out": 0.3,
1961      "cost_per_1m_in_cached": 0.037,
1962      "cost_per_1m_out_cached": 0,
1963      "context_window": 131072,
1964      "default_max_tokens": 8000,
1965      "can_reason": true,
1966      "reasoning_levels": [
1967        "low",
1968        "medium",
1969        "high"
1970      ],
1971      "default_reasoning_effort": "medium",
1972      "supports_attachments": false,
1973      "options": {}
1974    },
1975    {
1976      "id": "openai/o1",
1977      "name": "o1",
1978      "cost_per_1m_in": 15,
1979      "cost_per_1m_out": 60,
1980      "cost_per_1m_in_cached": 7.5,
1981      "cost_per_1m_out_cached": 0,
1982      "context_window": 200000,
1983      "default_max_tokens": 8000,
1984      "can_reason": true,
1985      "reasoning_levels": [
1986        "low",
1987        "medium",
1988        "high"
1989      ],
1990      "default_reasoning_effort": "medium",
1991      "supports_attachments": true,
1992      "options": {}
1993    },
1994    {
1995      "id": "openai/o3",
1996      "name": "o3",
1997      "cost_per_1m_in": 2,
1998      "cost_per_1m_out": 8,
1999      "cost_per_1m_in_cached": 0.5,
2000      "cost_per_1m_out_cached": 0,
2001      "context_window": 200000,
2002      "default_max_tokens": 8000,
2003      "can_reason": true,
2004      "reasoning_levels": [
2005        "low",
2006        "medium",
2007        "high"
2008      ],
2009      "default_reasoning_effort": "medium",
2010      "supports_attachments": true,
2011      "options": {}
2012    },
2013    {
2014      "id": "openai/o3-pro",
2015      "name": "o3 Pro",
2016      "cost_per_1m_in": 20,
2017      "cost_per_1m_out": 80,
2018      "cost_per_1m_in_cached": 0,
2019      "cost_per_1m_out_cached": 0,
2020      "context_window": 200000,
2021      "default_max_tokens": 8000,
2022      "can_reason": true,
2023      "reasoning_levels": [
2024        "low",
2025        "medium",
2026        "high"
2027      ],
2028      "default_reasoning_effort": "medium",
2029      "supports_attachments": true,
2030      "options": {}
2031    },
2032    {
2033      "id": "openai/o3-deep-research",
2034      "name": "o3-deep-research",
2035      "cost_per_1m_in": 10,
2036      "cost_per_1m_out": 40,
2037      "cost_per_1m_in_cached": 2.5,
2038      "cost_per_1m_out_cached": 0,
2039      "context_window": 200000,
2040      "default_max_tokens": 8000,
2041      "can_reason": true,
2042      "reasoning_levels": [
2043        "low",
2044        "medium",
2045        "high"
2046      ],
2047      "default_reasoning_effort": "medium",
2048      "supports_attachments": true,
2049      "options": {}
2050    },
2051    {
2052      "id": "openai/o3-mini",
2053      "name": "o3-mini",
2054      "cost_per_1m_in": 1.1,
2055      "cost_per_1m_out": 4.4,
2056      "cost_per_1m_in_cached": 0.55,
2057      "cost_per_1m_out_cached": 0,
2058      "context_window": 200000,
2059      "default_max_tokens": 8000,
2060      "can_reason": true,
2061      "reasoning_levels": [
2062        "low",
2063        "medium",
2064        "high"
2065      ],
2066      "default_reasoning_effort": "medium",
2067      "supports_attachments": false,
2068      "options": {}
2069    },
2070    {
2071      "id": "openai/o4-mini",
2072      "name": "o4-mini",
2073      "cost_per_1m_in": 1.1,
2074      "cost_per_1m_out": 4.4,
2075      "cost_per_1m_in_cached": 0.275,
2076      "cost_per_1m_out_cached": 0,
2077      "context_window": 200000,
2078      "default_max_tokens": 8000,
2079      "can_reason": true,
2080      "reasoning_levels": [
2081        "low",
2082        "medium",
2083        "high"
2084      ],
2085      "default_reasoning_effort": "medium",
2086      "supports_attachments": true,
2087      "options": {}
2088    },
2089    {
2090      "id": "vercel/v0-1.0-md",
2091      "name": "v0-1.0-md",
2092      "cost_per_1m_in": 3,
2093      "cost_per_1m_out": 15,
2094      "cost_per_1m_in_cached": 0,
2095      "cost_per_1m_out_cached": 0,
2096      "context_window": 128000,
2097      "default_max_tokens": 8000,
2098      "can_reason": false,
2099      "supports_attachments": true,
2100      "options": {}
2101    },
2102    {
2103      "id": "vercel/v0-1.5-md",
2104      "name": "v0-1.5-md",
2105      "cost_per_1m_in": 3,
2106      "cost_per_1m_out": 15,
2107      "cost_per_1m_in_cached": 0,
2108      "cost_per_1m_out_cached": 0,
2109      "context_window": 128000,
2110      "default_max_tokens": 8000,
2111      "can_reason": false,
2112      "supports_attachments": true,
2113      "options": {}
2114    }
2115  ],
2116  "default_headers": {
2117    "HTTP-Referer": "https://charm.land",
2118    "X-Title": "Crush"
2119  }
2120}