vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "vercel",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true,
  21      "options": {}
  22    },
  23    {
  24      "id": "anthropic/claude-3.5-haiku",
  25      "name": "Claude 3.5 Haiku",
  26      "cost_per_1m_in": 0.8,
  27      "cost_per_1m_out": 4,
  28      "cost_per_1m_in_cached": 0.08,
  29      "cost_per_1m_out_cached": 1,
  30      "context_window": 200000,
  31      "default_max_tokens": 8000,
  32      "can_reason": false,
  33      "supports_attachments": true,
  34      "options": {}
  35    },
  36    {
  37      "id": "anthropic/claude-3.5-sonnet",
  38      "name": "Claude 3.5 Sonnet",
  39      "cost_per_1m_in": 3,
  40      "cost_per_1m_out": 15,
  41      "cost_per_1m_in_cached": 0.3,
  42      "cost_per_1m_out_cached": 3.75,
  43      "context_window": 200000,
  44      "default_max_tokens": 8000,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "anthropic/claude-3.5-sonnet-20240620",
  51      "name": "Claude 3.5 Sonnet (2024-06-20)",
  52      "cost_per_1m_in": 3,
  53      "cost_per_1m_out": 15,
  54      "cost_per_1m_in_cached": 0.3,
  55      "cost_per_1m_out_cached": 3.75,
  56      "context_window": 200000,
  57      "default_max_tokens": 8000,
  58      "can_reason": false,
  59      "supports_attachments": true,
  60      "options": {}
  61    },
  62    {
  63      "id": "anthropic/claude-3.7-sonnet",
  64      "name": "Claude 3.7 Sonnet",
  65      "cost_per_1m_in": 3,
  66      "cost_per_1m_out": 15,
  67      "cost_per_1m_in_cached": 0.3,
  68      "cost_per_1m_out_cached": 3.75,
  69      "context_window": 200000,
  70      "default_max_tokens": 8000,
  71      "can_reason": true,
  72      "reasoning_levels": [
  73        "none",
  74        "minimal",
  75        "low",
  76        "medium",
  77        "high",
  78        "xhigh"
  79      ],
  80      "default_reasoning_effort": "medium",
  81      "supports_attachments": true,
  82      "options": {}
  83    },
  84    {
  85      "id": "anthropic/claude-haiku-4.5",
  86      "name": "Claude Haiku 4.5",
  87      "cost_per_1m_in": 1,
  88      "cost_per_1m_out": 5,
  89      "cost_per_1m_in_cached": 0.1,
  90      "cost_per_1m_out_cached": 1.25,
  91      "context_window": 200000,
  92      "default_max_tokens": 8000,
  93      "can_reason": true,
  94      "reasoning_levels": [
  95        "none",
  96        "minimal",
  97        "low",
  98        "medium",
  99        "high",
 100        "xhigh"
 101      ],
 102      "default_reasoning_effort": "medium",
 103      "supports_attachments": true,
 104      "options": {}
 105    },
 106    {
 107      "id": "anthropic/claude-opus-4",
 108      "name": "Claude Opus 4",
 109      "cost_per_1m_in": 15,
 110      "cost_per_1m_out": 75,
 111      "cost_per_1m_in_cached": 1.5,
 112      "cost_per_1m_out_cached": 18.75,
 113      "context_window": 200000,
 114      "default_max_tokens": 8000,
 115      "can_reason": true,
 116      "reasoning_levels": [
 117        "none",
 118        "minimal",
 119        "low",
 120        "medium",
 121        "high",
 122        "xhigh"
 123      ],
 124      "default_reasoning_effort": "medium",
 125      "supports_attachments": true,
 126      "options": {}
 127    },
 128    {
 129      "id": "anthropic/claude-opus-4.1",
 130      "name": "Claude Opus 4.1",
 131      "cost_per_1m_in": 15,
 132      "cost_per_1m_out": 75,
 133      "cost_per_1m_in_cached": 1.5,
 134      "cost_per_1m_out_cached": 18.75,
 135      "context_window": 200000,
 136      "default_max_tokens": 8000,
 137      "can_reason": true,
 138      "reasoning_levels": [
 139        "none",
 140        "minimal",
 141        "low",
 142        "medium",
 143        "high",
 144        "xhigh"
 145      ],
 146      "default_reasoning_effort": "medium",
 147      "supports_attachments": true,
 148      "options": {}
 149    },
 150    {
 151      "id": "anthropic/claude-opus-4.5",
 152      "name": "Claude Opus 4.5",
 153      "cost_per_1m_in": 5,
 154      "cost_per_1m_out": 25,
 155      "cost_per_1m_in_cached": 0.5,
 156      "cost_per_1m_out_cached": 6.25,
 157      "context_window": 200000,
 158      "default_max_tokens": 8000,
 159      "can_reason": true,
 160      "reasoning_levels": [
 161        "none",
 162        "minimal",
 163        "low",
 164        "medium",
 165        "high",
 166        "xhigh"
 167      ],
 168      "default_reasoning_effort": "medium",
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-opus-4.6",
 174      "name": "Claude Opus 4.6",
 175      "cost_per_1m_in": 5,
 176      "cost_per_1m_out": 25,
 177      "cost_per_1m_in_cached": 0.5,
 178      "cost_per_1m_out_cached": 6.25,
 179      "context_window": 1000000,
 180      "default_max_tokens": 8000,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "none",
 184        "minimal",
 185        "low",
 186        "medium",
 187        "high",
 188        "xhigh"
 189      ],
 190      "default_reasoning_effort": "medium",
 191      "supports_attachments": true,
 192      "options": {}
 193    },
 194    {
 195      "id": "anthropic/claude-sonnet-4",
 196      "name": "Claude Sonnet 4",
 197      "cost_per_1m_in": 3,
 198      "cost_per_1m_out": 15,
 199      "cost_per_1m_in_cached": 0.3,
 200      "cost_per_1m_out_cached": 3.75,
 201      "context_window": 1000000,
 202      "default_max_tokens": 8000,
 203      "can_reason": true,
 204      "reasoning_levels": [
 205        "none",
 206        "minimal",
 207        "low",
 208        "medium",
 209        "high",
 210        "xhigh"
 211      ],
 212      "default_reasoning_effort": "medium",
 213      "supports_attachments": true,
 214      "options": {}
 215    },
 216    {
 217      "id": "anthropic/claude-sonnet-4.5",
 218      "name": "Claude Sonnet 4.5",
 219      "cost_per_1m_in": 3,
 220      "cost_per_1m_out": 15,
 221      "cost_per_1m_in_cached": 0.3,
 222      "cost_per_1m_out_cached": 3.75,
 223      "context_window": 1000000,
 224      "default_max_tokens": 8000,
 225      "can_reason": true,
 226      "reasoning_levels": [
 227        "none",
 228        "minimal",
 229        "low",
 230        "medium",
 231        "high",
 232        "xhigh"
 233      ],
 234      "default_reasoning_effort": "medium",
 235      "supports_attachments": true,
 236      "options": {}
 237    },
 238    {
 239      "id": "anthropic/claude-sonnet-4.6",
 240      "name": "Claude Sonnet 4.6",
 241      "cost_per_1m_in": 3,
 242      "cost_per_1m_out": 15,
 243      "cost_per_1m_in_cached": 0.3,
 244      "cost_per_1m_out_cached": 3.75,
 245      "context_window": 1000000,
 246      "default_max_tokens": 8000,
 247      "can_reason": true,
 248      "reasoning_levels": [
 249        "none",
 250        "minimal",
 251        "low",
 252        "medium",
 253        "high",
 254        "xhigh"
 255      ],
 256      "default_reasoning_effort": "medium",
 257      "supports_attachments": true,
 258      "options": {}
 259    },
 260    {
 261      "id": "cohere/command-a",
 262      "name": "Command A",
 263      "cost_per_1m_in": 2.5,
 264      "cost_per_1m_out": 10,
 265      "cost_per_1m_in_cached": 0,
 266      "cost_per_1m_out_cached": 0,
 267      "context_window": 256000,
 268      "default_max_tokens": 8000,
 269      "can_reason": false,
 270      "supports_attachments": false,
 271      "options": {}
 272    },
 273    {
 274      "id": "deepseek/deepseek-v3",
 275      "name": "DeepSeek V3 0324",
 276      "cost_per_1m_in": 0.77,
 277      "cost_per_1m_out": 0.77,
 278      "cost_per_1m_in_cached": 0,
 279      "cost_per_1m_out_cached": 0,
 280      "context_window": 163840,
 281      "default_max_tokens": 8000,
 282      "can_reason": false,
 283      "supports_attachments": false,
 284      "options": {}
 285    },
 286    {
 287      "id": "deepseek/deepseek-v3.1-terminus",
 288      "name": "DeepSeek V3.1 Terminus",
 289      "cost_per_1m_in": 0.27,
 290      "cost_per_1m_out": 1,
 291      "cost_per_1m_in_cached": 0,
 292      "cost_per_1m_out_cached": 0,
 293      "context_window": 131072,
 294      "default_max_tokens": 8000,
 295      "can_reason": true,
 296      "reasoning_levels": [
 297        "low",
 298        "medium",
 299        "high"
 300      ],
 301      "default_reasoning_effort": "medium",
 302      "supports_attachments": false,
 303      "options": {}
 304    },
 305    {
 306      "id": "deepseek/deepseek-v3.2",
 307      "name": "DeepSeek V3.2",
 308      "cost_per_1m_in": 0.26,
 309      "cost_per_1m_out": 0.38,
 310      "cost_per_1m_in_cached": 0.13,
 311      "cost_per_1m_out_cached": 0,
 312      "context_window": 128000,
 313      "default_max_tokens": 8000,
 314      "can_reason": false,
 315      "supports_attachments": false,
 316      "options": {}
 317    },
 318    {
 319      "id": "deepseek/deepseek-v3.2-thinking",
 320      "name": "DeepSeek V3.2 Thinking",
 321      "cost_per_1m_in": 0.28,
 322      "cost_per_1m_out": 0.42,
 323      "cost_per_1m_in_cached": 0.028,
 324      "cost_per_1m_out_cached": 0,
 325      "context_window": 128000,
 326      "default_max_tokens": 8000,
 327      "can_reason": true,
 328      "reasoning_levels": [
 329        "low",
 330        "medium",
 331        "high"
 332      ],
 333      "default_reasoning_effort": "medium",
 334      "supports_attachments": false,
 335      "options": {}
 336    },
 337    {
 338      "id": "deepseek/deepseek-r1",
 339      "name": "DeepSeek-R1",
 340      "cost_per_1m_in": 1.35,
 341      "cost_per_1m_out": 5.4,
 342      "cost_per_1m_in_cached": 0,
 343      "cost_per_1m_out_cached": 0,
 344      "context_window": 128000,
 345      "default_max_tokens": 8000,
 346      "can_reason": true,
 347      "reasoning_levels": [
 348        "low",
 349        "medium",
 350        "high"
 351      ],
 352      "default_reasoning_effort": "medium",
 353      "supports_attachments": false,
 354      "options": {}
 355    },
 356    {
 357      "id": "deepseek/deepseek-v3.1",
 358      "name": "DeepSeek-V3.1",
 359      "cost_per_1m_in": 0.5,
 360      "cost_per_1m_out": 1.5,
 361      "cost_per_1m_in_cached": 0,
 362      "cost_per_1m_out_cached": 0,
 363      "context_window": 163840,
 364      "default_max_tokens": 8000,
 365      "can_reason": true,
 366      "reasoning_levels": [
 367        "low",
 368        "medium",
 369        "high"
 370      ],
 371      "default_reasoning_effort": "medium",
 372      "supports_attachments": false,
 373      "options": {}
 374    },
 375    {
 376      "id": "mistral/devstral-2",
 377      "name": "Devstral 2",
 378      "cost_per_1m_in": 0,
 379      "cost_per_1m_out": 0,
 380      "cost_per_1m_in_cached": 0,
 381      "cost_per_1m_out_cached": 0,
 382      "context_window": 256000,
 383      "default_max_tokens": 8000,
 384      "can_reason": false,
 385      "supports_attachments": false,
 386      "options": {}
 387    },
 388    {
 389      "id": "mistral/devstral-small",
 390      "name": "Devstral Small 1.1",
 391      "cost_per_1m_in": 0.1,
 392      "cost_per_1m_out": 0.3,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 128000,
 396      "default_max_tokens": 8000,
 397      "can_reason": false,
 398      "supports_attachments": false,
 399      "options": {}
 400    },
 401    {
 402      "id": "mistral/devstral-small-2",
 403      "name": "Devstral Small 2",
 404      "cost_per_1m_in": 0,
 405      "cost_per_1m_out": 0,
 406      "cost_per_1m_in_cached": 0,
 407      "cost_per_1m_out_cached": 0,
 408      "context_window": 256000,
 409      "default_max_tokens": 8000,
 410      "can_reason": false,
 411      "supports_attachments": false,
 412      "options": {}
 413    },
 414    {
 415      "id": "zai/glm-4.5-air",
 416      "name": "GLM 4.5 Air",
 417      "cost_per_1m_in": 0.2,
 418      "cost_per_1m_out": 1.1,
 419      "cost_per_1m_in_cached": 0.03,
 420      "cost_per_1m_out_cached": 0,
 421      "context_window": 128000,
 422      "default_max_tokens": 8000,
 423      "can_reason": true,
 424      "reasoning_levels": [
 425        "low",
 426        "medium",
 427        "high"
 428      ],
 429      "default_reasoning_effort": "medium",
 430      "supports_attachments": false,
 431      "options": {}
 432    },
 433    {
 434      "id": "zai/glm-4.5v",
 435      "name": "GLM 4.5V",
 436      "cost_per_1m_in": 0.6,
 437      "cost_per_1m_out": 1.8,
 438      "cost_per_1m_in_cached": 0.11,
 439      "cost_per_1m_out_cached": 0,
 440      "context_window": 66000,
 441      "default_max_tokens": 8000,
 442      "can_reason": false,
 443      "supports_attachments": true,
 444      "options": {}
 445    },
 446    {
 447      "id": "zai/glm-4.6",
 448      "name": "GLM 4.6",
 449      "cost_per_1m_in": 0.45,
 450      "cost_per_1m_out": 1.8,
 451      "cost_per_1m_in_cached": 0.11,
 452      "cost_per_1m_out_cached": 0,
 453      "context_window": 200000,
 454      "default_max_tokens": 8000,
 455      "can_reason": true,
 456      "reasoning_levels": [
 457        "low",
 458        "medium",
 459        "high"
 460      ],
 461      "default_reasoning_effort": "medium",
 462      "supports_attachments": false,
 463      "options": {}
 464    },
 465    {
 466      "id": "zai/glm-4.7",
 467      "name": "GLM 4.7",
 468      "cost_per_1m_in": 0.6,
 469      "cost_per_1m_out": 2.2,
 470      "cost_per_1m_in_cached": 0,
 471      "cost_per_1m_out_cached": 0,
 472      "context_window": 200000,
 473      "default_max_tokens": 8000,
 474      "can_reason": true,
 475      "reasoning_levels": [
 476        "low",
 477        "medium",
 478        "high"
 479      ],
 480      "default_reasoning_effort": "medium",
 481      "supports_attachments": false,
 482      "options": {}
 483    },
 484    {
 485      "id": "zai/glm-4.7-flash",
 486      "name": "GLM 4.7 Flash",
 487      "cost_per_1m_in": 0.07,
 488      "cost_per_1m_out": 0.4,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 200000,
 492      "default_max_tokens": 8000,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": false,
 501      "options": {}
 502    },
 503    {
 504      "id": "zai/glm-4.7-flashx",
 505      "name": "GLM 4.7 FlashX",
 506      "cost_per_1m_in": 0.06,
 507      "cost_per_1m_out": 0.4,
 508      "cost_per_1m_in_cached": 0.01,
 509      "cost_per_1m_out_cached": 0,
 510      "context_window": 200000,
 511      "default_max_tokens": 8000,
 512      "can_reason": true,
 513      "reasoning_levels": [
 514        "low",
 515        "medium",
 516        "high"
 517      ],
 518      "default_reasoning_effort": "medium",
 519      "supports_attachments": false,
 520      "options": {}
 521    },
 522    {
 523      "id": "zai/glm-5",
 524      "name": "GLM 5",
 525      "cost_per_1m_in": 1,
 526      "cost_per_1m_out": 3.2,
 527      "cost_per_1m_in_cached": 0.2,
 528      "cost_per_1m_out_cached": 0,
 529      "context_window": 202800,
 530      "default_max_tokens": 8000,
 531      "can_reason": true,
 532      "reasoning_levels": [
 533        "low",
 534        "medium",
 535        "high"
 536      ],
 537      "default_reasoning_effort": "medium",
 538      "supports_attachments": false,
 539      "options": {}
 540    },
 541    {
 542      "id": "zai/glm-4.5",
 543      "name": "GLM-4.5",
 544      "cost_per_1m_in": 0.6,
 545      "cost_per_1m_out": 2.2,
 546      "cost_per_1m_in_cached": 0.11,
 547      "cost_per_1m_out_cached": 0,
 548      "context_window": 128000,
 549      "default_max_tokens": 8000,
 550      "can_reason": true,
 551      "reasoning_levels": [
 552        "low",
 553        "medium",
 554        "high"
 555      ],
 556      "default_reasoning_effort": "medium",
 557      "supports_attachments": false,
 558      "options": {}
 559    },
 560    {
 561      "id": "zai/glm-4.6v",
 562      "name": "GLM-4.6V",
 563      "cost_per_1m_in": 0.3,
 564      "cost_per_1m_out": 0.9,
 565      "cost_per_1m_in_cached": 0.05,
 566      "cost_per_1m_out_cached": 0,
 567      "context_window": 128000,
 568      "default_max_tokens": 8000,
 569      "can_reason": true,
 570      "reasoning_levels": [
 571        "low",
 572        "medium",
 573        "high"
 574      ],
 575      "default_reasoning_effort": "medium",
 576      "supports_attachments": true,
 577      "options": {}
 578    },
 579    {
 580      "id": "zai/glm-4.6v-flash",
 581      "name": "GLM-4.6V-Flash",
 582      "cost_per_1m_in": 0,
 583      "cost_per_1m_out": 0,
 584      "cost_per_1m_in_cached": 0,
 585      "cost_per_1m_out_cached": 0,
 586      "context_window": 128000,
 587      "default_max_tokens": 8000,
 588      "can_reason": true,
 589      "reasoning_levels": [
 590        "low",
 591        "medium",
 592        "high"
 593      ],
 594      "default_reasoning_effort": "medium",
 595      "supports_attachments": true,
 596      "options": {}
 597    },
 598    {
 599      "id": "openai/gpt-5-chat",
 600      "name": "GPT 5 Chat",
 601      "cost_per_1m_in": 1.25,
 602      "cost_per_1m_out": 10,
 603      "cost_per_1m_in_cached": 0.125,
 604      "cost_per_1m_out_cached": 0,
 605      "context_window": 128000,
 606      "default_max_tokens": 8000,
 607      "can_reason": true,
 608      "reasoning_levels": [
 609        "low",
 610        "medium",
 611        "high"
 612      ],
 613      "default_reasoning_effort": "medium",
 614      "supports_attachments": true,
 615      "options": {}
 616    },
 617    {
 618      "id": "openai/gpt-5.1-codex-max",
 619      "name": "GPT 5.1 Codex Max",
 620      "cost_per_1m_in": 1.25,
 621      "cost_per_1m_out": 10,
 622      "cost_per_1m_in_cached": 0.125,
 623      "cost_per_1m_out_cached": 0,
 624      "context_window": 400000,
 625      "default_max_tokens": 8000,
 626      "can_reason": true,
 627      "reasoning_levels": [
 628        "low",
 629        "medium",
 630        "high"
 631      ],
 632      "default_reasoning_effort": "medium",
 633      "supports_attachments": true,
 634      "options": {}
 635    },
 636    {
 637      "id": "openai/gpt-5.1-codex-mini",
 638      "name": "GPT 5.1 Codex Mini",
 639      "cost_per_1m_in": 0.25,
 640      "cost_per_1m_out": 2,
 641      "cost_per_1m_in_cached": 0.025,
 642      "cost_per_1m_out_cached": 0,
 643      "context_window": 400000,
 644      "default_max_tokens": 8000,
 645      "can_reason": true,
 646      "reasoning_levels": [
 647        "low",
 648        "medium",
 649        "high"
 650      ],
 651      "default_reasoning_effort": "medium",
 652      "supports_attachments": true,
 653      "options": {}
 654    },
 655    {
 656      "id": "openai/gpt-5.1-thinking",
 657      "name": "GPT 5.1 Thinking",
 658      "cost_per_1m_in": 1.25,
 659      "cost_per_1m_out": 10,
 660      "cost_per_1m_in_cached": 0.125,
 661      "cost_per_1m_out_cached": 0,
 662      "context_window": 400000,
 663      "default_max_tokens": 8000,
 664      "can_reason": true,
 665      "reasoning_levels": [
 666        "low",
 667        "medium",
 668        "high"
 669      ],
 670      "default_reasoning_effort": "medium",
 671      "supports_attachments": true,
 672      "options": {}
 673    },
 674    {
 675      "id": "openai/gpt-5.2",
 676      "name": "GPT 5.2",
 677      "cost_per_1m_in": 1.75,
 678      "cost_per_1m_out": 14,
 679      "cost_per_1m_in_cached": 0.175,
 680      "cost_per_1m_out_cached": 0,
 681      "context_window": 400000,
 682      "default_max_tokens": 8000,
 683      "can_reason": true,
 684      "reasoning_levels": [
 685        "low",
 686        "medium",
 687        "high"
 688      ],
 689      "default_reasoning_effort": "medium",
 690      "supports_attachments": true,
 691      "options": {}
 692    },
 693    {
 694      "id": "openai/gpt-5.2-pro",
  695      "name": "GPT 5.2 Pro",
 696      "cost_per_1m_in": 21,
 697      "cost_per_1m_out": 168,
 698      "cost_per_1m_in_cached": 0,
 699      "cost_per_1m_out_cached": 0,
 700      "context_window": 400000,
 701      "default_max_tokens": 8000,
 702      "can_reason": true,
 703      "reasoning_levels": [
 704        "low",
 705        "medium",
 706        "high"
 707      ],
 708      "default_reasoning_effort": "medium",
 709      "supports_attachments": true,
 710      "options": {}
 711    },
 712    {
 713      "id": "openai/gpt-5.2-chat",
 714      "name": "GPT 5.2 Chat",
 715      "cost_per_1m_in": 1.75,
 716      "cost_per_1m_out": 14,
 717      "cost_per_1m_in_cached": 0.175,
 718      "cost_per_1m_out_cached": 0,
 719      "context_window": 128000,
 720      "default_max_tokens": 8000,
 721      "can_reason": true,
 722      "reasoning_levels": [
 723        "low",
 724        "medium",
 725        "high"
 726      ],
 727      "default_reasoning_effort": "medium",
 728      "supports_attachments": true,
 729      "options": {}
 730    },
 731    {
 732      "id": "openai/gpt-5.2-codex",
 733      "name": "GPT 5.2 Codex",
 734      "cost_per_1m_in": 1.75,
 735      "cost_per_1m_out": 14,
 736      "cost_per_1m_in_cached": 0.175,
 737      "cost_per_1m_out_cached": 0,
 738      "context_window": 400000,
 739      "default_max_tokens": 8000,
 740      "can_reason": true,
 741      "reasoning_levels": [
 742        "low",
 743        "medium",
 744        "high"
 745      ],
 746      "default_reasoning_effort": "medium",
 747      "supports_attachments": true,
 748      "options": {}
 749    },
 750    {
 751      "id": "openai/gpt-5.3-codex",
 752      "name": "GPT 5.3 Codex",
 753      "cost_per_1m_in": 1.75,
 754      "cost_per_1m_out": 14,
 755      "cost_per_1m_in_cached": 0.175,
 756      "cost_per_1m_out_cached": 0,
 757      "context_window": 400000,
 758      "default_max_tokens": 8000,
 759      "can_reason": true,
 760      "reasoning_levels": [
 761        "low",
 762        "medium",
 763        "high"
 764      ],
 765      "default_reasoning_effort": "medium",
 766      "supports_attachments": true,
 767      "options": {}
 768    },
 769    {
 770      "id": "openai/gpt-5.4",
 771      "name": "GPT 5.4",
 772      "cost_per_1m_in": 2.5,
 773      "cost_per_1m_out": 15,
 774      "cost_per_1m_in_cached": 0.25,
 775      "cost_per_1m_out_cached": 0,
 776      "context_window": 1050000,
 777      "default_max_tokens": 8000,
 778      "can_reason": true,
 779      "reasoning_levels": [
 780        "low",
 781        "medium",
 782        "high"
 783      ],
 784      "default_reasoning_effort": "medium",
 785      "supports_attachments": true,
 786      "options": {}
 787    },
 788    {
 789      "id": "openai/gpt-5.4-pro",
 790      "name": "GPT 5.4 Pro",
 791      "cost_per_1m_in": 30,
 792      "cost_per_1m_out": 180,
 793      "cost_per_1m_in_cached": 0,
 794      "cost_per_1m_out_cached": 0,
 795      "context_window": 1050000,
 796      "default_max_tokens": 8000,
 797      "can_reason": true,
 798      "reasoning_levels": [
 799        "low",
 800        "medium",
 801        "high"
 802      ],
 803      "default_reasoning_effort": "medium",
 804      "supports_attachments": true,
 805      "options": {}
 806    },
 807    {
 808      "id": "openai/gpt-4-turbo",
 809      "name": "GPT-4 Turbo",
 810      "cost_per_1m_in": 10,
 811      "cost_per_1m_out": 30,
 812      "cost_per_1m_in_cached": 0,
 813      "cost_per_1m_out_cached": 0,
 814      "context_window": 128000,
 815      "default_max_tokens": 4096,
 816      "can_reason": false,
 817      "supports_attachments": true,
 818      "options": {}
 819    },
 820    {
 821      "id": "openai/gpt-4.1",
 822      "name": "GPT-4.1",
 823      "cost_per_1m_in": 2,
 824      "cost_per_1m_out": 8,
 825      "cost_per_1m_in_cached": 0.5,
 826      "cost_per_1m_out_cached": 0,
 827      "context_window": 1047576,
 828      "default_max_tokens": 8000,
 829      "can_reason": false,
 830      "supports_attachments": true,
 831      "options": {}
 832    },
 833    {
 834      "id": "openai/gpt-4.1-mini",
 835      "name": "GPT-4.1 mini",
 836      "cost_per_1m_in": 0.4,
 837      "cost_per_1m_out": 1.6,
 838      "cost_per_1m_in_cached": 0.1,
 839      "cost_per_1m_out_cached": 0,
 840      "context_window": 1047576,
 841      "default_max_tokens": 8000,
 842      "can_reason": false,
 843      "supports_attachments": true,
 844      "options": {}
 845    },
 846    {
 847      "id": "openai/gpt-4.1-nano",
 848      "name": "GPT-4.1 nano",
 849      "cost_per_1m_in": 0.1,
 850      "cost_per_1m_out": 0.4,
 851      "cost_per_1m_in_cached": 0.025,
 852      "cost_per_1m_out_cached": 0,
 853      "context_window": 1047576,
 854      "default_max_tokens": 8000,
 855      "can_reason": false,
 856      "supports_attachments": true,
 857      "options": {}
 858    },
 859    {
 860      "id": "openai/gpt-4o",
 861      "name": "GPT-4o",
 862      "cost_per_1m_in": 2.5,
 863      "cost_per_1m_out": 10,
 864      "cost_per_1m_in_cached": 1.25,
 865      "cost_per_1m_out_cached": 0,
 866      "context_window": 128000,
 867      "default_max_tokens": 8000,
 868      "can_reason": false,
 869      "supports_attachments": true,
 870      "options": {}
 871    },
 872    {
 873      "id": "openai/gpt-4o-mini",
 874      "name": "GPT-4o mini",
 875      "cost_per_1m_in": 0.15,
 876      "cost_per_1m_out": 0.6,
 877      "cost_per_1m_in_cached": 0.075,
 878      "cost_per_1m_out_cached": 0,
 879      "context_window": 128000,
 880      "default_max_tokens": 8000,
 881      "can_reason": false,
 882      "supports_attachments": true,
 883      "options": {}
 884    },
 885    {
 886      "id": "openai/gpt-5",
 887      "name": "GPT-5",
 888      "cost_per_1m_in": 1.25,
 889      "cost_per_1m_out": 10,
 890      "cost_per_1m_in_cached": 0.125,
 891      "cost_per_1m_out_cached": 0,
 892      "context_window": 400000,
 893      "default_max_tokens": 8000,
 894      "can_reason": true,
 895      "reasoning_levels": [
 896        "low",
 897        "medium",
 898        "high"
 899      ],
 900      "default_reasoning_effort": "medium",
 901      "supports_attachments": true,
 902      "options": {}
 903    },
 904    {
 905      "id": "openai/gpt-5-mini",
 906      "name": "GPT-5 mini",
 907      "cost_per_1m_in": 0.25,
 908      "cost_per_1m_out": 2,
 909      "cost_per_1m_in_cached": 0.025,
 910      "cost_per_1m_out_cached": 0,
 911      "context_window": 400000,
 912      "default_max_tokens": 8000,
 913      "can_reason": true,
 914      "reasoning_levels": [
 915        "low",
 916        "medium",
 917        "high"
 918      ],
 919      "default_reasoning_effort": "medium",
 920      "supports_attachments": true,
 921      "options": {}
 922    },
 923    {
 924      "id": "openai/gpt-5-nano",
 925      "name": "GPT-5 nano",
 926      "cost_per_1m_in": 0.05,
 927      "cost_per_1m_out": 0.4,
 928      "cost_per_1m_in_cached": 0.005,
 929      "cost_per_1m_out_cached": 0,
 930      "context_window": 400000,
 931      "default_max_tokens": 8000,
 932      "can_reason": true,
 933      "reasoning_levels": [
 934        "low",
 935        "medium",
 936        "high"
 937      ],
 938      "default_reasoning_effort": "medium",
 939      "supports_attachments": true,
 940      "options": {}
 941    },
 942    {
 943      "id": "openai/gpt-5-pro",
 944      "name": "GPT-5 pro",
 945      "cost_per_1m_in": 15,
 946      "cost_per_1m_out": 120,
 947      "cost_per_1m_in_cached": 0,
 948      "cost_per_1m_out_cached": 0,
 949      "context_window": 400000,
 950      "default_max_tokens": 8000,
 951      "can_reason": true,
 952      "reasoning_levels": [
 953        "low",
 954        "medium",
 955        "high"
 956      ],
 957      "default_reasoning_effort": "medium",
 958      "supports_attachments": true,
 959      "options": {}
 960    },
 961    {
 962      "id": "openai/gpt-5-codex",
 963      "name": "GPT-5-Codex",
 964      "cost_per_1m_in": 1.25,
 965      "cost_per_1m_out": 10,
 966      "cost_per_1m_in_cached": 0.125,
 967      "cost_per_1m_out_cached": 0,
 968      "context_window": 400000,
 969      "default_max_tokens": 8000,
 970      "can_reason": true,
 971      "reasoning_levels": [
 972        "low",
 973        "medium",
 974        "high"
 975      ],
 976      "default_reasoning_effort": "medium",
 977      "supports_attachments": false,
 978      "options": {}
 979    },
 980    {
 981      "id": "openai/gpt-5.1-instant",
 982      "name": "GPT-5.1 Instant",
 983      "cost_per_1m_in": 1.25,
 984      "cost_per_1m_out": 10,
 985      "cost_per_1m_in_cached": 0.125,
 986      "cost_per_1m_out_cached": 0,
 987      "context_window": 128000,
 988      "default_max_tokens": 8000,
 989      "can_reason": true,
 990      "reasoning_levels": [
 991        "low",
 992        "medium",
 993        "high"
 994      ],
 995      "default_reasoning_effort": "medium",
 996      "supports_attachments": true,
 997      "options": {}
 998    },
 999    {
1000      "id": "openai/gpt-5.1-codex",
1001      "name": "GPT-5.1-Codex",
1002      "cost_per_1m_in": 1.25,
1003      "cost_per_1m_out": 10,
1004      "cost_per_1m_in_cached": 0.125,
1005      "cost_per_1m_out_cached": 0,
1006      "context_window": 400000,
1007      "default_max_tokens": 8000,
1008      "can_reason": true,
1009      "reasoning_levels": [
1010        "low",
1011        "medium",
1012        "high"
1013      ],
1014      "default_reasoning_effort": "medium",
1015      "supports_attachments": true,
1016      "options": {}
1017    },
1018    {
1019      "id": "openai/gpt-5.3-chat",
1020      "name": "GPT-5.3 Chat",
1021      "cost_per_1m_in": 1.75,
1022      "cost_per_1m_out": 14,
1023      "cost_per_1m_in_cached": 0.175,
1024      "cost_per_1m_out_cached": 0,
1025      "context_window": 128000,
1026      "default_max_tokens": 8000,
1027      "can_reason": true,
1028      "reasoning_levels": [
1029        "low",
1030        "medium",
1031        "high"
1032      ],
1033      "default_reasoning_effort": "medium",
1034      "supports_attachments": true,
1035      "options": {}
1036    },
1037    {
1038      "id": "google/gemini-2.0-flash",
1039      "name": "Gemini 2.0 Flash",
1040      "cost_per_1m_in": 0.15,
1041      "cost_per_1m_out": 0.6,
1042      "cost_per_1m_in_cached": 0,
1043      "cost_per_1m_out_cached": 0,
1044      "context_window": 1048576,
1045      "default_max_tokens": 8000,
1046      "can_reason": false,
1047      "supports_attachments": true,
1048      "options": {}
1049    },
1050    {
1051      "id": "google/gemini-2.0-flash-lite",
1052      "name": "Gemini 2.0 Flash Lite",
1053      "cost_per_1m_in": 0.075,
1054      "cost_per_1m_out": 0.3,
1055      "cost_per_1m_in_cached": 0,
1056      "cost_per_1m_out_cached": 0,
1057      "context_window": 1048576,
1058      "default_max_tokens": 8000,
1059      "can_reason": false,
1060      "supports_attachments": true,
1061      "options": {}
1062    },
1063    {
1064      "id": "google/gemini-2.5-flash",
1065      "name": "Gemini 2.5 Flash",
1066      "cost_per_1m_in": 0.3,
1067      "cost_per_1m_out": 2.5,
1068      "cost_per_1m_in_cached": 0.03,
1069      "cost_per_1m_out_cached": 0,
1070      "context_window": 1000000,
1071      "default_max_tokens": 8000,
1072      "can_reason": true,
1073      "reasoning_levels": [
1074        "low",
1075        "medium",
1076        "high"
1077      ],
1078      "default_reasoning_effort": "medium",
1079      "supports_attachments": true,
1080      "options": {}
1081    },
1082    {
1083      "id": "google/gemini-2.5-flash-lite",
1084      "name": "Gemini 2.5 Flash Lite",
1085      "cost_per_1m_in": 0.1,
1086      "cost_per_1m_out": 0.4,
1087      "cost_per_1m_in_cached": 0.01,
1088      "cost_per_1m_out_cached": 0,
1089      "context_window": 1048576,
1090      "default_max_tokens": 8000,
1091      "can_reason": true,
1092      "reasoning_levels": [
1093        "low",
1094        "medium",
1095        "high"
1096      ],
1097      "default_reasoning_effort": "medium",
1098      "supports_attachments": true,
1099      "options": {}
1100    },
1101    {
1102      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
1103      "name": "Gemini 2.5 Flash Lite Preview 09-2025",
1104      "cost_per_1m_in": 0.1,
1105      "cost_per_1m_out": 0.4,
1106      "cost_per_1m_in_cached": 0.01,
1107      "cost_per_1m_out_cached": 0,
1108      "context_window": 1048576,
1109      "default_max_tokens": 8000,
1110      "can_reason": true,
1111      "reasoning_levels": [
1112        "low",
1113        "medium",
1114        "high"
1115      ],
1116      "default_reasoning_effort": "medium",
1117      "supports_attachments": true,
1118      "options": {}
1119    },
1120    {
1121      "id": "google/gemini-2.5-flash-preview-09-2025",
1122      "name": "Gemini 2.5 Flash Preview 09-2025",
1123      "cost_per_1m_in": 0.3,
1124      "cost_per_1m_out": 2.5,
1125      "cost_per_1m_in_cached": 0.03,
1126      "cost_per_1m_out_cached": 0,
1127      "context_window": 1000000,
1128      "default_max_tokens": 8000,
1129      "can_reason": true,
1130      "reasoning_levels": [
1131        "low",
1132        "medium",
1133        "high"
1134      ],
1135      "default_reasoning_effort": "medium",
1136      "supports_attachments": true,
1137      "options": {}
1138    },
1139    {
1140      "id": "google/gemini-2.5-pro",
1141      "name": "Gemini 2.5 Pro",
1142      "cost_per_1m_in": 1.25,
1143      "cost_per_1m_out": 10,
1144      "cost_per_1m_in_cached": 0.125,
1145      "cost_per_1m_out_cached": 0,
1146      "context_window": 1048576,
1147      "default_max_tokens": 8000,
1148      "can_reason": true,
1149      "reasoning_levels": [
1150        "low",
1151        "medium",
1152        "high"
1153      ],
1154      "default_reasoning_effort": "medium",
1155      "supports_attachments": true,
1156      "options": {}
1157    },
1158    {
1159      "id": "google/gemini-3-flash",
1160      "name": "Gemini 3 Flash",
1161      "cost_per_1m_in": 0.5,
1162      "cost_per_1m_out": 3,
1163      "cost_per_1m_in_cached": 0.05,
1164      "cost_per_1m_out_cached": 0,
1165      "context_window": 1000000,
1166      "default_max_tokens": 8000,
1167      "can_reason": true,
1168      "reasoning_levels": [
1169        "low",
1170        "medium",
1171        "high"
1172      ],
1173      "default_reasoning_effort": "medium",
1174      "supports_attachments": true,
1175      "options": {}
1176    },
1177    {
1178      "id": "google/gemini-3-pro-preview",
1179      "name": "Gemini 3 Pro Preview",
1180      "cost_per_1m_in": 2,
1181      "cost_per_1m_out": 12,
1182      "cost_per_1m_in_cached": 0.2,
1183      "cost_per_1m_out_cached": 0,
1184      "context_window": 1000000,
1185      "default_max_tokens": 8000,
1186      "can_reason": true,
1187      "reasoning_levels": [
1188        "low",
1189        "medium",
1190        "high"
1191      ],
1192      "default_reasoning_effort": "medium",
1193      "supports_attachments": true,
1194      "options": {}
1195    },
1196    {
1197      "id": "google/gemini-3.1-flash-lite-preview",
1198      "name": "Gemini 3.1 Flash Lite Preview",
1199      "cost_per_1m_in": 0.25,
1200      "cost_per_1m_out": 1.5,
1201      "cost_per_1m_in_cached": 0,
1202      "cost_per_1m_out_cached": 0,
1203      "context_window": 1000000,
1204      "default_max_tokens": 8000,
1205      "can_reason": true,
1206      "reasoning_levels": [
1207        "low",
1208        "medium",
1209        "high"
1210      ],
1211      "default_reasoning_effort": "medium",
1212      "supports_attachments": true,
1213      "options": {}
1214    },
1215    {
1216      "id": "google/gemini-3.1-pro-preview",
1217      "name": "Gemini 3.1 Pro Preview",
1218      "cost_per_1m_in": 2,
1219      "cost_per_1m_out": 12,
1220      "cost_per_1m_in_cached": 0.2,
1221      "cost_per_1m_out_cached": 0,
1222      "context_window": 1000000,
1223      "default_max_tokens": 8000,
1224      "can_reason": true,
1225      "reasoning_levels": [
1226        "low",
1227        "medium",
1228        "high"
1229      ],
1230      "default_reasoning_effort": "medium",
1231      "supports_attachments": true,
1232      "options": {}
1233    },
1234    {
1235      "id": "xai/grok-2-vision",
1236      "name": "Grok 2 Vision",
1237      "cost_per_1m_in": 2,
1238      "cost_per_1m_out": 10,
1239      "cost_per_1m_in_cached": 0,
1240      "cost_per_1m_out_cached": 0,
1241      "context_window": 32768,
1242      "default_max_tokens": 8000,
1243      "can_reason": false,
1244      "supports_attachments": true,
1245      "options": {}
1246    },
1247    {
1248      "id": "xai/grok-3",
1249      "name": "Grok 3 Beta",
1250      "cost_per_1m_in": 3,
1251      "cost_per_1m_out": 15,
1252      "cost_per_1m_in_cached": 0,
1253      "cost_per_1m_out_cached": 0,
1254      "context_window": 131072,
1255      "default_max_tokens": 8000,
1256      "can_reason": false,
1257      "supports_attachments": false,
1258      "options": {}
1259    },
1260    {
1261      "id": "xai/grok-3-fast",
1262      "name": "Grok 3 Fast Beta",
1263      "cost_per_1m_in": 5,
1264      "cost_per_1m_out": 25,
1265      "cost_per_1m_in_cached": 0,
1266      "cost_per_1m_out_cached": 0,
1267      "context_window": 131072,
1268      "default_max_tokens": 8000,
1269      "can_reason": false,
1270      "supports_attachments": false,
1271      "options": {}
1272    },
1273    {
1274      "id": "xai/grok-3-mini",
1275      "name": "Grok 3 Mini Beta",
1276      "cost_per_1m_in": 0.3,
1277      "cost_per_1m_out": 0.5,
1278      "cost_per_1m_in_cached": 0,
1279      "cost_per_1m_out_cached": 0,
1280      "context_window": 131072,
1281      "default_max_tokens": 8000,
1282      "can_reason": false,
1283      "supports_attachments": false,
1284      "options": {}
1285    },
1286    {
1287      "id": "xai/grok-3-mini-fast",
1288      "name": "Grok 3 Mini Fast Beta",
1289      "cost_per_1m_in": 0.6,
1290      "cost_per_1m_out": 4,
1291      "cost_per_1m_in_cached": 0,
1292      "cost_per_1m_out_cached": 0,
1293      "context_window": 131072,
1294      "default_max_tokens": 8000,
1295      "can_reason": false,
1296      "supports_attachments": false,
1297      "options": {}
1298    },
1299    {
1300      "id": "xai/grok-4",
1301      "name": "Grok 4",
1302      "cost_per_1m_in": 3,
1303      "cost_per_1m_out": 15,
1304      "cost_per_1m_in_cached": 0.75,
1305      "cost_per_1m_out_cached": 0,
1306      "context_window": 256000,
1307      "default_max_tokens": 8000,
1308      "can_reason": true,
1309      "reasoning_levels": [
1310        "low",
1311        "medium",
1312        "high"
1313      ],
1314      "default_reasoning_effort": "medium",
1315      "supports_attachments": true,
1316      "options": {}
1317    },
1318    {
1319      "id": "xai/grok-4-fast-non-reasoning",
1320      "name": "Grok 4 Fast Non-Reasoning",
1321      "cost_per_1m_in": 0.2,
1322      "cost_per_1m_out": 0.5,
1323      "cost_per_1m_in_cached": 0.05,
1324      "cost_per_1m_out_cached": 0,
1325      "context_window": 2000000,
1326      "default_max_tokens": 8000,
1327      "can_reason": false,
1328      "supports_attachments": false,
1329      "options": {}
1330    },
1331    {
1332      "id": "xai/grok-4-fast-reasoning",
1333      "name": "Grok 4 Fast Reasoning",
1334      "cost_per_1m_in": 0.2,
1335      "cost_per_1m_out": 0.5,
1336      "cost_per_1m_in_cached": 0.05,
1337      "cost_per_1m_out_cached": 0,
1338      "context_window": 2000000,
1339      "default_max_tokens": 8000,
1340      "can_reason": true,
1341      "reasoning_levels": [
1342        "low",
1343        "medium",
1344        "high"
1345      ],
1346      "default_reasoning_effort": "medium",
1347      "supports_attachments": false,
1348      "options": {}
1349    },
1350    {
1351      "id": "xai/grok-4.1-fast-non-reasoning",
1352      "name": "Grok 4.1 Fast Non-Reasoning",
1353      "cost_per_1m_in": 0.2,
1354      "cost_per_1m_out": 0.5,
1355      "cost_per_1m_in_cached": 0.05,
1356      "cost_per_1m_out_cached": 0,
1357      "context_window": 2000000,
1358      "default_max_tokens": 8000,
1359      "can_reason": false,
1360      "supports_attachments": false,
1361      "options": {}
1362    },
1363    {
1364      "id": "xai/grok-4.1-fast-reasoning",
1365      "name": "Grok 4.1 Fast Reasoning",
1366      "cost_per_1m_in": 0.2,
1367      "cost_per_1m_out": 0.5,
1368      "cost_per_1m_in_cached": 0.05,
1369      "cost_per_1m_out_cached": 0,
1370      "context_window": 2000000,
1371      "default_max_tokens": 8000,
1372      "can_reason": true,
1373      "reasoning_levels": [
1374        "low",
1375        "medium",
1376        "high"
1377      ],
1378      "default_reasoning_effort": "medium",
1379      "supports_attachments": false,
1380      "options": {}
1381    },
1382    {
1383      "id": "xai/grok-4.20-non-reasoning-beta",
1384      "name": "Grok 4.20 Beta Non-Reasoning",
1385      "cost_per_1m_in": 2,
1386      "cost_per_1m_out": 6,
1387      "cost_per_1m_in_cached": 0.2,
1388      "cost_per_1m_out_cached": 0,
1389      "context_window": 2000000,
1390      "default_max_tokens": 8000,
1391      "can_reason": false,
1392      "supports_attachments": true,
1393      "options": {}
1394    },
1395    {
1396      "id": "xai/grok-4.20-reasoning-beta",
1397      "name": "Grok 4.20 Beta Reasoning",
1398      "cost_per_1m_in": 2,
1399      "cost_per_1m_out": 6,
1400      "cost_per_1m_in_cached": 0.2,
1401      "cost_per_1m_out_cached": 0,
1402      "context_window": 2000000,
1403      "default_max_tokens": 8000,
1404      "can_reason": true,
1405      "reasoning_levels": [
1406        "low",
1407        "medium",
1408        "high"
1409      ],
1410      "default_reasoning_effort": "medium",
1411      "supports_attachments": true,
1412      "options": {}
1413    },
1414    {
1415      "id": "xai/grok-4.20-multi-agent-beta",
1416      "name": "Grok 4.20 Multi Agent Beta",
1417      "cost_per_1m_in": 2,
1418      "cost_per_1m_out": 6,
1419      "cost_per_1m_in_cached": 0.2,
1420      "cost_per_1m_out_cached": 0,
1421      "context_window": 2000000,
1422      "default_max_tokens": 8000,
1423      "can_reason": true,
1424      "reasoning_levels": [
1425        "low",
1426        "medium",
1427        "high"
1428      ],
1429      "default_reasoning_effort": "medium",
1430      "supports_attachments": false,
1431      "options": {}
1432    },
1433    {
1434      "id": "xai/grok-code-fast-1",
1435      "name": "Grok Code Fast 1",
1436      "cost_per_1m_in": 0.2,
1437      "cost_per_1m_out": 1.5,
1438      "cost_per_1m_in_cached": 0.02,
1439      "cost_per_1m_out_cached": 0,
1440      "context_window": 256000,
1441      "default_max_tokens": 8000,
1442      "can_reason": true,
1443      "reasoning_levels": [
1444        "low",
1445        "medium",
1446        "high"
1447      ],
1448      "default_reasoning_effort": "medium",
1449      "supports_attachments": false,
1450      "options": {}
1451    },
1452    {
1453      "id": "prime-intellect/intellect-3",
1454      "name": "INTELLECT 3",
1455      "cost_per_1m_in": 0.2,
1456      "cost_per_1m_out": 1.1,
1457      "cost_per_1m_in_cached": 0,
1458      "cost_per_1m_out_cached": 0,
1459      "context_window": 131072,
1460      "default_max_tokens": 8000,
1461      "can_reason": true,
1462      "reasoning_levels": [
1463        "low",
1464        "medium",
1465        "high"
1466      ],
1467      "default_reasoning_effort": "medium",
1468      "supports_attachments": false,
1469      "options": {}
1470    },
1471    {
1472      "id": "moonshotai/kimi-k2",
1473      "name": "Kimi K2",
1474      "cost_per_1m_in": 0.6,
1475      "cost_per_1m_out": 2.5,
1476      "cost_per_1m_in_cached": 0,
1477      "cost_per_1m_out_cached": 0,
1478      "context_window": 131072,
1479      "default_max_tokens": 8000,
1480      "can_reason": false,
1481      "supports_attachments": false,
1482      "options": {}
1483    },
1484    {
1485      "id": "moonshotai/kimi-k2-0905",
1486      "name": "Kimi K2 0905",
1487      "cost_per_1m_in": 0.6,
1488      "cost_per_1m_out": 2.5,
1489      "cost_per_1m_in_cached": 0,
1490      "cost_per_1m_out_cached": 0,
1491      "context_window": 256000,
1492      "default_max_tokens": 8000,
1493      "can_reason": false,
1494      "supports_attachments": false,
1495      "options": {}
1496    },
1497    {
1498      "id": "moonshotai/kimi-k2-thinking",
1499      "name": "Kimi K2 Thinking",
1500      "cost_per_1m_in": 0.6,
1501      "cost_per_1m_out": 2.5,
1502      "cost_per_1m_in_cached": 0.15,
1503      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1505      "default_max_tokens": 8000,
1506      "can_reason": true,
1507      "reasoning_levels": [
1508        "low",
1509        "medium",
1510        "high"
1511      ],
1512      "default_reasoning_effort": "medium",
1513      "supports_attachments": false,
1514      "options": {}
1515    },
1516    {
1517      "id": "moonshotai/kimi-k2-thinking-turbo",
1518      "name": "Kimi K2 Thinking Turbo",
1519      "cost_per_1m_in": 1.15,
1520      "cost_per_1m_out": 8,
1521      "cost_per_1m_in_cached": 0.15,
1522      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1524      "default_max_tokens": 8000,
1525      "can_reason": true,
1526      "reasoning_levels": [
1527        "low",
1528        "medium",
1529        "high"
1530      ],
1531      "default_reasoning_effort": "medium",
1532      "supports_attachments": false,
1533      "options": {}
1534    },
1535    {
1536      "id": "moonshotai/kimi-k2-turbo",
1537      "name": "Kimi K2 Turbo",
1538      "cost_per_1m_in": 2.4,
1539      "cost_per_1m_out": 10,
1540      "cost_per_1m_in_cached": 0,
1541      "cost_per_1m_out_cached": 0,
1542      "context_window": 256000,
1543      "default_max_tokens": 8000,
1544      "can_reason": false,
1545      "supports_attachments": false,
1546      "options": {}
1547    },
1548    {
1549      "id": "moonshotai/kimi-k2.5",
1550      "name": "Kimi K2.5",
1551      "cost_per_1m_in": 0.6,
1552      "cost_per_1m_out": 3,
1553      "cost_per_1m_in_cached": 0.1,
1554      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1556      "default_max_tokens": 8000,
1557      "can_reason": true,
1558      "reasoning_levels": [
1559        "low",
1560        "medium",
1561        "high"
1562      ],
1563      "default_reasoning_effort": "medium",
1564      "supports_attachments": true,
1565      "options": {}
1566    },
1567    {
1568      "id": "meta/llama-3.1-70b",
1569      "name": "Llama 3.1 70B Instruct",
1570      "cost_per_1m_in": 0.72,
1571      "cost_per_1m_out": 0.72,
1572      "cost_per_1m_in_cached": 0,
1573      "cost_per_1m_out_cached": 0,
1574      "context_window": 128000,
1575      "default_max_tokens": 8000,
1576      "can_reason": false,
1577      "supports_attachments": false,
1578      "options": {}
1579    },
1580    {
1581      "id": "meta/llama-3.1-8b",
1582      "name": "Llama 3.1 8B Instruct",
1583      "cost_per_1m_in": 0.1,
1584      "cost_per_1m_out": 0.1,
1585      "cost_per_1m_in_cached": 0,
1586      "cost_per_1m_out_cached": 0,
1587      "context_window": 128000,
1588      "default_max_tokens": 8000,
1589      "can_reason": false,
1590      "supports_attachments": false,
1591      "options": {}
1592    },
1593    {
1594      "id": "meta/llama-3.2-11b",
1595      "name": "Llama 3.2 11B Vision Instruct",
1596      "cost_per_1m_in": 0.16,
1597      "cost_per_1m_out": 0.16,
1598      "cost_per_1m_in_cached": 0,
1599      "cost_per_1m_out_cached": 0,
1600      "context_window": 128000,
1601      "default_max_tokens": 8000,
1602      "can_reason": false,
1603      "supports_attachments": true,
1604      "options": {}
1605    },
1606    {
1607      "id": "meta/llama-3.2-90b",
1608      "name": "Llama 3.2 90B Vision Instruct",
1609      "cost_per_1m_in": 0.72,
1610      "cost_per_1m_out": 0.72,
1611      "cost_per_1m_in_cached": 0,
1612      "cost_per_1m_out_cached": 0,
1613      "context_window": 128000,
1614      "default_max_tokens": 8000,
1615      "can_reason": false,
1616      "supports_attachments": true,
1617      "options": {}
1618    },
1619    {
1620      "id": "meta/llama-3.3-70b",
1621      "name": "Llama 3.3 70B Instruct",
1622      "cost_per_1m_in": 0.72,
1623      "cost_per_1m_out": 0.72,
1624      "cost_per_1m_in_cached": 0,
1625      "cost_per_1m_out_cached": 0,
1626      "context_window": 128000,
1627      "default_max_tokens": 8000,
1628      "can_reason": false,
1629      "supports_attachments": false,
1630      "options": {}
1631    },
1632    {
1633      "id": "meta/llama-4-maverick",
1634      "name": "Llama 4 Maverick 17B Instruct",
1635      "cost_per_1m_in": 0.24,
1636      "cost_per_1m_out": 0.97,
1637      "cost_per_1m_in_cached": 0,
1638      "cost_per_1m_out_cached": 0,
1639      "context_window": 128000,
1640      "default_max_tokens": 8000,
1641      "can_reason": false,
1642      "supports_attachments": true,
1643      "options": {}
1644    },
1645    {
1646      "id": "meta/llama-4-scout",
1647      "name": "Llama 4 Scout 17B Instruct",
1648      "cost_per_1m_in": 0.17,
1649      "cost_per_1m_out": 0.66,
1650      "cost_per_1m_in_cached": 0,
1651      "cost_per_1m_out_cached": 0,
1652      "context_window": 128000,
1653      "default_max_tokens": 8000,
1654      "can_reason": false,
1655      "supports_attachments": true,
1656      "options": {}
1657    },
1658    {
1659      "id": "meituan/longcat-flash-chat",
1660      "name": "LongCat Flash Chat",
1661      "cost_per_1m_in": 0,
1662      "cost_per_1m_out": 0,
1663      "cost_per_1m_in_cached": 0,
1664      "cost_per_1m_out_cached": 0,
1665      "context_window": 128000,
1666      "default_max_tokens": 8000,
1667      "can_reason": false,
1668      "supports_attachments": false,
1669      "options": {}
1670    },
1671    {
1672      "id": "meituan/longcat-flash-thinking",
1673      "name": "LongCat Flash Thinking",
1674      "cost_per_1m_in": 0.15,
1675      "cost_per_1m_out": 1.5,
1676      "cost_per_1m_in_cached": 0,
1677      "cost_per_1m_out_cached": 0,
1678      "context_window": 128000,
1679      "default_max_tokens": 8000,
1680      "can_reason": true,
1681      "reasoning_levels": [
1682        "low",
1683        "medium",
1684        "high"
1685      ],
1686      "default_reasoning_effort": "medium",
1687      "supports_attachments": false,
1688      "options": {}
1689    },
1690    {
1691      "id": "inception/mercury-2",
1692      "name": "Mercury 2",
1693      "cost_per_1m_in": 0.25,
1694      "cost_per_1m_out": 0.75,
1695      "cost_per_1m_in_cached": 0.025,
1696      "cost_per_1m_out_cached": 0,
1697      "context_window": 128000,
1698      "default_max_tokens": 8000,
1699      "can_reason": true,
1700      "reasoning_levels": [
1701        "low",
1702        "medium",
1703        "high"
1704      ],
1705      "default_reasoning_effort": "medium",
1706      "supports_attachments": false,
1707      "options": {}
1708    },
1709    {
1710      "id": "inception/mercury-coder-small",
1711      "name": "Mercury Coder Small Beta",
1712      "cost_per_1m_in": 0.25,
1713      "cost_per_1m_out": 1,
1714      "cost_per_1m_in_cached": 0,
1715      "cost_per_1m_out_cached": 0,
1716      "context_window": 32000,
1717      "default_max_tokens": 8000,
1718      "can_reason": false,
1719      "supports_attachments": false,
1720      "options": {}
1721    },
1722    {
1723      "id": "xiaomi/mimo-v2-flash",
1724      "name": "MiMo V2 Flash",
1725      "cost_per_1m_in": 0.1,
1726      "cost_per_1m_out": 0.3,
1727      "cost_per_1m_in_cached": 0.02,
1728      "cost_per_1m_out_cached": 0,
1729      "context_window": 262144,
1730      "default_max_tokens": 8000,
1731      "can_reason": true,
1732      "reasoning_levels": [
1733        "low",
1734        "medium",
1735        "high"
1736      ],
1737      "default_reasoning_effort": "medium",
1738      "supports_attachments": false,
1739      "options": {}
1740    },
1741    {
1742      "id": "minimax/minimax-m2",
1743      "name": "MiniMax M2",
1744      "cost_per_1m_in": 0.3,
1745      "cost_per_1m_out": 1.2,
1746      "cost_per_1m_in_cached": 0.03,
1747      "cost_per_1m_out_cached": 0.375,
1748      "context_window": 205000,
1749      "default_max_tokens": 8000,
1750      "can_reason": true,
1751      "reasoning_levels": [
1752        "low",
1753        "medium",
1754        "high"
1755      ],
1756      "default_reasoning_effort": "medium",
1757      "supports_attachments": false,
1758      "options": {}
1759    },
1760    {
1761      "id": "minimax/minimax-m2.1",
1762      "name": "MiniMax M2.1",
1763      "cost_per_1m_in": 0.3,
1764      "cost_per_1m_out": 1.2,
1765      "cost_per_1m_in_cached": 0.03,
1766      "cost_per_1m_out_cached": 0.375,
1767      "context_window": 204800,
1768      "default_max_tokens": 8000,
1769      "can_reason": true,
1770      "reasoning_levels": [
1771        "low",
1772        "medium",
1773        "high"
1774      ],
1775      "default_reasoning_effort": "medium",
1776      "supports_attachments": false,
1777      "options": {}
1778    },
1779    {
1780      "id": "minimax/minimax-m2.1-lightning",
1781      "name": "MiniMax M2.1 Lightning",
1782      "cost_per_1m_in": 0.3,
1783      "cost_per_1m_out": 2.4,
1784      "cost_per_1m_in_cached": 0.03,
1785      "cost_per_1m_out_cached": 0.375,
1786      "context_window": 204800,
1787      "default_max_tokens": 8000,
1788      "can_reason": true,
1789      "reasoning_levels": [
1790        "low",
1791        "medium",
1792        "high"
1793      ],
1794      "default_reasoning_effort": "medium",
1795      "supports_attachments": false,
1796      "options": {}
1797    },
1798    {
1799      "id": "minimax/minimax-m2.5",
1800      "name": "MiniMax M2.5",
1801      "cost_per_1m_in": 0.3,
1802      "cost_per_1m_out": 1.2,
1803      "cost_per_1m_in_cached": 0.03,
1804      "cost_per_1m_out_cached": 0.375,
1805      "context_window": 204800,
1806      "default_max_tokens": 8000,
1807      "can_reason": true,
1808      "reasoning_levels": [
1809        "low",
1810        "medium",
1811        "high"
1812      ],
1813      "default_reasoning_effort": "medium",
1814      "supports_attachments": false,
1815      "options": {}
1816    },
1817    {
1818      "id": "minimax/minimax-m2.5-highspeed",
1819      "name": "MiniMax M2.5 High Speed",
1820      "cost_per_1m_in": 0.6,
1821      "cost_per_1m_out": 2.4,
1822      "cost_per_1m_in_cached": 0.03,
1823      "cost_per_1m_out_cached": 0.375,
      "context_window": 204800,
      "default_max_tokens": 8000,
1826      "can_reason": true,
1827      "reasoning_levels": [
1828        "low",
1829        "medium",
1830        "high"
1831      ],
1832      "default_reasoning_effort": "medium",
1833      "supports_attachments": false,
1834      "options": {}
1835    },
1836    {
1837      "id": "mistral/ministral-3b",
1838      "name": "Ministral 3B",
1839      "cost_per_1m_in": 0.04,
1840      "cost_per_1m_out": 0.04,
1841      "cost_per_1m_in_cached": 0,
1842      "cost_per_1m_out_cached": 0,
1843      "context_window": 128000,
1844      "default_max_tokens": 4000,
1845      "can_reason": false,
1846      "supports_attachments": false,
1847      "options": {}
1848    },
1849    {
1850      "id": "mistral/ministral-8b",
1851      "name": "Ministral 8B",
1852      "cost_per_1m_in": 0.1,
1853      "cost_per_1m_out": 0.1,
1854      "cost_per_1m_in_cached": 0,
1855      "cost_per_1m_out_cached": 0,
1856      "context_window": 128000,
1857      "default_max_tokens": 4000,
1858      "can_reason": false,
1859      "supports_attachments": false,
1860      "options": {}
1861    },
1862    {
1863      "id": "mistral/codestral",
1864      "name": "Mistral Codestral",
1865      "cost_per_1m_in": 0.3,
1866      "cost_per_1m_out": 0.9,
1867      "cost_per_1m_in_cached": 0,
1868      "cost_per_1m_out_cached": 0,
1869      "context_window": 128000,
1870      "default_max_tokens": 4000,
1871      "can_reason": false,
1872      "supports_attachments": false,
1873      "options": {}
1874    },
1875    {
1876      "id": "mistral/mistral-medium",
1877      "name": "Mistral Medium 3.1",
1878      "cost_per_1m_in": 0.4,
1879      "cost_per_1m_out": 2,
1880      "cost_per_1m_in_cached": 0,
1881      "cost_per_1m_out_cached": 0,
1882      "context_window": 128000,
1883      "default_max_tokens": 8000,
1884      "can_reason": false,
1885      "supports_attachments": true,
1886      "options": {}
1887    },
1888    {
1889      "id": "mistral/mistral-small",
1890      "name": "Mistral Small",
1891      "cost_per_1m_in": 0.1,
1892      "cost_per_1m_out": 0.3,
1893      "cost_per_1m_in_cached": 0,
1894      "cost_per_1m_out_cached": 0,
1895      "context_window": 32000,
1896      "default_max_tokens": 4000,
1897      "can_reason": false,
1898      "supports_attachments": true,
1899      "options": {}
1900    },
1901    {
1902      "id": "nvidia/nemotron-nano-12b-v2-vl",
1903      "name": "Nvidia Nemotron Nano 12B V2 VL",
1904      "cost_per_1m_in": 0.2,
1905      "cost_per_1m_out": 0.6,
1906      "cost_per_1m_in_cached": 0,
1907      "cost_per_1m_out_cached": 0,
1908      "context_window": 131072,
1909      "default_max_tokens": 8000,
1910      "can_reason": true,
1911      "reasoning_levels": [
1912        "low",
1913        "medium",
1914        "high"
1915      ],
1916      "default_reasoning_effort": "medium",
1917      "supports_attachments": true,
1918      "options": {}
1919    },
1920    {
1921      "id": "nvidia/nemotron-nano-9b-v2",
1922      "name": "Nvidia Nemotron Nano 9B V2",
1923      "cost_per_1m_in": 0.06,
1924      "cost_per_1m_out": 0.23,
1925      "cost_per_1m_in_cached": 0,
1926      "cost_per_1m_out_cached": 0,
1927      "context_window": 131072,
1928      "default_max_tokens": 8000,
1929      "can_reason": true,
1930      "reasoning_levels": [
1931        "low",
1932        "medium",
1933        "high"
1934      ],
1935      "default_reasoning_effort": "medium",
1936      "supports_attachments": false,
1937      "options": {}
1938    },
1939    {
1940      "id": "mistral/pixtral-12b",
1941      "name": "Pixtral 12B 2409",
1942      "cost_per_1m_in": 0.15,
1943      "cost_per_1m_out": 0.15,
1944      "cost_per_1m_in_cached": 0,
1945      "cost_per_1m_out_cached": 0,
1946      "context_window": 128000,
1947      "default_max_tokens": 4000,
1948      "can_reason": false,
1949      "supports_attachments": true,
1950      "options": {}
1951    },
1952    {
1953      "id": "mistral/pixtral-large",
1954      "name": "Pixtral Large",
1955      "cost_per_1m_in": 2,
1956      "cost_per_1m_out": 6,
1957      "cost_per_1m_in_cached": 0,
1958      "cost_per_1m_out_cached": 0,
1959      "context_window": 128000,
1960      "default_max_tokens": 4000,
1961      "can_reason": false,
1962      "supports_attachments": true,
1963      "options": {}
1964    },
1965    {
1966      "id": "alibaba/qwen-3-32b",
1967      "name": "Qwen 3 32B",
1968      "cost_per_1m_in": 0.29,
1969      "cost_per_1m_out": 0.59,
1970      "cost_per_1m_in_cached": 0,
1971      "cost_per_1m_out_cached": 0,
1972      "context_window": 131072,
1973      "default_max_tokens": 8000,
1974      "can_reason": true,
1975      "reasoning_levels": [
1976        "low",
1977        "medium",
1978        "high"
1979      ],
1980      "default_reasoning_effort": "medium",
1981      "supports_attachments": false,
1982      "options": {}
1983    },
1984    {
1985      "id": "alibaba/qwen3-coder-30b-a3b",
1986      "name": "Qwen 3 Coder 30B A3B Instruct",
1987      "cost_per_1m_in": 0.15,
1988      "cost_per_1m_out": 0.6,
1989      "cost_per_1m_in_cached": 0,
1990      "cost_per_1m_out_cached": 0,
1991      "context_window": 262144,
1992      "default_max_tokens": 8000,
1993      "can_reason": true,
1994      "reasoning_levels": [
1995        "low",
1996        "medium",
1997        "high"
1998      ],
1999      "default_reasoning_effort": "medium",
2000      "supports_attachments": false,
2001      "options": {}
2002    },
2003    {
2004      "id": "alibaba/qwen3-max-thinking",
2005      "name": "Qwen 3 Max Thinking",
2006      "cost_per_1m_in": 1.2,
2007      "cost_per_1m_out": 6,
2008      "cost_per_1m_in_cached": 0.24,
2009      "cost_per_1m_out_cached": 0,
2010      "context_window": 256000,
2011      "default_max_tokens": 8000,
2012      "can_reason": true,
2013      "reasoning_levels": [
2014        "low",
2015        "medium",
2016        "high"
2017      ],
2018      "default_reasoning_effort": "medium",
2019      "supports_attachments": false,
2020      "options": {}
2021    },
2022    {
2023      "id": "alibaba/qwen3.5-flash",
2024      "name": "Qwen 3.5 Flash",
2025      "cost_per_1m_in": 0.1,
2026      "cost_per_1m_out": 0.4,
2027      "cost_per_1m_in_cached": 0.001,
2028      "cost_per_1m_out_cached": 0.125,
2029      "context_window": 1000000,
2030      "default_max_tokens": 8000,
2031      "can_reason": true,
2032      "reasoning_levels": [
2033        "low",
2034        "medium",
2035        "high"
2036      ],
2037      "default_reasoning_effort": "medium",
2038      "supports_attachments": true,
2039      "options": {}
2040    },
2041    {
2042      "id": "alibaba/qwen3.5-plus",
2043      "name": "Qwen 3.5 Plus",
2044      "cost_per_1m_in": 0.4,
2045      "cost_per_1m_out": 2.4,
2046      "cost_per_1m_in_cached": 0.04,
2047      "cost_per_1m_out_cached": 0.5,
2048      "context_window": 1000000,
2049      "default_max_tokens": 8000,
2050      "can_reason": true,
2051      "reasoning_levels": [
2052        "low",
2053        "medium",
2054        "high"
2055      ],
2056      "default_reasoning_effort": "medium",
2057      "supports_attachments": true,
2058      "options": {}
2059    },
2060    {
2061      "id": "alibaba/qwen3-235b-a22b-thinking",
2062      "name": "Qwen3 235B A22B Thinking 2507",
2063      "cost_per_1m_in": 0.3,
2064      "cost_per_1m_out": 2.9,
2065      "cost_per_1m_in_cached": 0,
2066      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
2068      "default_max_tokens": 8000,
2069      "can_reason": true,
2070      "reasoning_levels": [
2071        "low",
2072        "medium",
2073        "high"
2074      ],
2075      "default_reasoning_effort": "medium",
2076      "supports_attachments": true,
2077      "options": {}
2078    },
2079    {
2080      "id": "alibaba/qwen3-coder",
2081      "name": "Qwen3 Coder 480B A35B Instruct",
2082      "cost_per_1m_in": 0.4,
2083      "cost_per_1m_out": 1.6,
2084      "cost_per_1m_in_cached": 0,
2085      "cost_per_1m_out_cached": 0,
2086      "context_window": 262144,
2087      "default_max_tokens": 8000,
2088      "can_reason": false,
2089      "supports_attachments": false,
2090      "options": {}
2091    },
2092    {
2093      "id": "alibaba/qwen3-coder-next",
2094      "name": "Qwen3 Coder Next",
2095      "cost_per_1m_in": 0.5,
2096      "cost_per_1m_out": 1.2,
2097      "cost_per_1m_in_cached": 0,
2098      "cost_per_1m_out_cached": 0,
2099      "context_window": 256000,
2100      "default_max_tokens": 8000,
2101      "can_reason": false,
2102      "supports_attachments": false,
2103      "options": {}
2104    },
2105    {
2106      "id": "alibaba/qwen3-coder-plus",
2107      "name": "Qwen3 Coder Plus",
2108      "cost_per_1m_in": 1,
2109      "cost_per_1m_out": 5,
2110      "cost_per_1m_in_cached": 0.2,
2111      "cost_per_1m_out_cached": 0,
2112      "context_window": 1000000,
2113      "default_max_tokens": 8000,
2114      "can_reason": false,
2115      "supports_attachments": false,
2116      "options": {}
2117    },
2118    {
2119      "id": "alibaba/qwen3-max",
2120      "name": "Qwen3 Max",
2121      "cost_per_1m_in": 1.2,
2122      "cost_per_1m_out": 6,
2123      "cost_per_1m_in_cached": 0.24,
2124      "cost_per_1m_out_cached": 0,
2125      "context_window": 262144,
2126      "default_max_tokens": 8000,
2127      "can_reason": false,
2128      "supports_attachments": false,
2129      "options": {}
2130    },
2131    {
2132      "id": "alibaba/qwen3-max-preview",
2133      "name": "Qwen3 Max Preview",
2134      "cost_per_1m_in": 1.2,
2135      "cost_per_1m_out": 6,
2136      "cost_per_1m_in_cached": 0.24,
2137      "cost_per_1m_out_cached": 0,
2138      "context_window": 262144,
2139      "default_max_tokens": 8000,
2140      "can_reason": false,
2141      "supports_attachments": false,
2142      "options": {}
2143    },
2144    {
2145      "id": "alibaba/qwen3-vl-thinking",
2146      "name": "Qwen3 VL 235B A22B Thinking",
2147      "cost_per_1m_in": 0.22,
2148      "cost_per_1m_out": 0.88,
2149      "cost_per_1m_in_cached": 0,
2150      "cost_per_1m_out_cached": 0,
2151      "context_window": 256000,
2152      "default_max_tokens": 8000,
2153      "can_reason": true,
2154      "reasoning_levels": [
2155        "low",
2156        "medium",
2157        "high"
2158      ],
2159      "default_reasoning_effort": "medium",
2160      "supports_attachments": true,
2161      "options": {}
2162    },
2163    {
2164      "id": "alibaba/qwen-3-14b",
2165      "name": "Qwen3-14B",
2166      "cost_per_1m_in": 0.06,
2167      "cost_per_1m_out": 0.24,
2168      "cost_per_1m_in_cached": 0,
2169      "cost_per_1m_out_cached": 0,
2170      "context_window": 40960,
2171      "default_max_tokens": 8000,
2172      "can_reason": true,
2173      "reasoning_levels": [
2174        "low",
2175        "medium",
2176        "high"
2177      ],
2178      "default_reasoning_effort": "medium",
2179      "supports_attachments": false,
2180      "options": {}
2181    },
2182    {
2183      "id": "alibaba/qwen-3-235b",
2184      "name": "Qwen3-235B-A22B",
2185      "cost_per_1m_in": 0.071,
2186      "cost_per_1m_out": 0.463,
2187      "cost_per_1m_in_cached": 0,
2188      "cost_per_1m_out_cached": 0,
2189      "context_window": 40960,
2190      "default_max_tokens": 8000,
2191      "can_reason": false,
2192      "supports_attachments": false,
2193      "options": {}
2194    },
2195    {
2196      "id": "alibaba/qwen-3-30b",
2197      "name": "Qwen3-30B-A3B",
2198      "cost_per_1m_in": 0.08,
2199      "cost_per_1m_out": 0.29,
2200      "cost_per_1m_in_cached": 0,
2201      "cost_per_1m_out_cached": 0,
2202      "context_window": 40960,
2203      "default_max_tokens": 8000,
2204      "can_reason": true,
2205      "reasoning_levels": [
2206        "low",
2207        "medium",
2208        "high"
2209      ],
2210      "default_reasoning_effort": "medium",
2211      "supports_attachments": false,
2212      "options": {}
2213    },
2214    {
2215      "id": "bytedance/seed-1.6",
2216      "name": "Seed 1.6",
2217      "cost_per_1m_in": 0.25,
2218      "cost_per_1m_out": 2,
2219      "cost_per_1m_in_cached": 0.05,
2220      "cost_per_1m_out_cached": 0,
2221      "context_window": 256000,
2222      "default_max_tokens": 8000,
2223      "can_reason": true,
2224      "reasoning_levels": [
2225        "low",
2226        "medium",
2227        "high"
2228      ],
2229      "default_reasoning_effort": "medium",
2230      "supports_attachments": false,
2231      "options": {}
2232    },
2233    {
2234      "id": "perplexity/sonar",
2235      "name": "Sonar",
2236      "cost_per_1m_in": 1,
2237      "cost_per_1m_out": 1,
2238      "cost_per_1m_in_cached": 0,
2239      "cost_per_1m_out_cached": 0,
2240      "context_window": 127000,
2241      "default_max_tokens": 8000,
2242      "can_reason": false,
2243      "supports_attachments": true,
2244      "options": {}
2245    },
2246    {
2247      "id": "perplexity/sonar-pro",
2248      "name": "Sonar Pro",
2249      "cost_per_1m_in": 3,
2250      "cost_per_1m_out": 15,
2251      "cost_per_1m_in_cached": 0,
2252      "cost_per_1m_out_cached": 0,
2253      "context_window": 200000,
2254      "default_max_tokens": 8000,
2255      "can_reason": false,
2256      "supports_attachments": true,
2257      "options": {}
2258    },
2259    {
2260      "id": "arcee-ai/trinity-large-preview",
2261      "name": "Trinity Large Preview",
2262      "cost_per_1m_in": 0.25,
2263      "cost_per_1m_out": 1,
2264      "cost_per_1m_in_cached": 0,
2265      "cost_per_1m_out_cached": 0,
2266      "context_window": 131000,
2267      "default_max_tokens": 8000,
2268      "can_reason": false,
2269      "supports_attachments": false,
2270      "options": {}
2271    },
2272    {
2273      "id": "openai/gpt-oss-20b",
2274      "name": "gpt-oss-20b",
2275      "cost_per_1m_in": 0.07,
2276      "cost_per_1m_out": 0.3,
2277      "cost_per_1m_in_cached": 0,
2278      "cost_per_1m_out_cached": 0,
2279      "context_window": 128000,
2280      "default_max_tokens": 8000,
2281      "can_reason": true,
2282      "reasoning_levels": [
2283        "low",
2284        "medium",
2285        "high"
2286      ],
2287      "default_reasoning_effort": "medium",
2288      "supports_attachments": false,
2289      "options": {}
2290    },
2291    {
2292      "id": "openai/gpt-oss-safeguard-20b",
2293      "name": "gpt-oss-safeguard-20b",
2294      "cost_per_1m_in": 0.075,
2295      "cost_per_1m_out": 0.3,
2296      "cost_per_1m_in_cached": 0.037,
2297      "cost_per_1m_out_cached": 0,
2298      "context_window": 131072,
2299      "default_max_tokens": 8000,
2300      "can_reason": true,
2301      "reasoning_levels": [
2302        "low",
2303        "medium",
2304        "high"
2305      ],
2306      "default_reasoning_effort": "medium",
2307      "supports_attachments": false,
2308      "options": {}
2309    },
2310    {
2311      "id": "openai/o1",
2312      "name": "o1",
2313      "cost_per_1m_in": 15,
2314      "cost_per_1m_out": 60,
2315      "cost_per_1m_in_cached": 7.5,
2316      "cost_per_1m_out_cached": 0,
2317      "context_window": 200000,
2318      "default_max_tokens": 8000,
2319      "can_reason": true,
2320      "reasoning_levels": [
2321        "low",
2322        "medium",
2323        "high"
2324      ],
2325      "default_reasoning_effort": "medium",
2326      "supports_attachments": true,
2327      "options": {}
2328    },
2329    {
2330      "id": "openai/o3",
2331      "name": "o3",
2332      "cost_per_1m_in": 2,
2333      "cost_per_1m_out": 8,
2334      "cost_per_1m_in_cached": 0.5,
2335      "cost_per_1m_out_cached": 0,
2336      "context_window": 200000,
2337      "default_max_tokens": 8000,
2338      "can_reason": true,
2339      "reasoning_levels": [
2340        "low",
2341        "medium",
2342        "high"
2343      ],
2344      "default_reasoning_effort": "medium",
2345      "supports_attachments": true,
2346      "options": {}
2347    },
2348    {
2349      "id": "openai/o3-pro",
2350      "name": "o3 Pro",
2351      "cost_per_1m_in": 20,
2352      "cost_per_1m_out": 80,
2353      "cost_per_1m_in_cached": 0,
2354      "cost_per_1m_out_cached": 0,
2355      "context_window": 200000,
2356      "default_max_tokens": 8000,
2357      "can_reason": true,
2358      "reasoning_levels": [
2359        "low",
2360        "medium",
2361        "high"
2362      ],
2363      "default_reasoning_effort": "medium",
2364      "supports_attachments": true,
2365      "options": {}
2366    },
2367    {
2368      "id": "openai/o3-deep-research",
2369      "name": "o3-deep-research",
2370      "cost_per_1m_in": 10,
2371      "cost_per_1m_out": 40,
2372      "cost_per_1m_in_cached": 2.5,
2373      "cost_per_1m_out_cached": 0,
2374      "context_window": 200000,
2375      "default_max_tokens": 8000,
2376      "can_reason": true,
2377      "reasoning_levels": [
2378        "low",
2379        "medium",
2380        "high"
2381      ],
2382      "default_reasoning_effort": "medium",
2383      "supports_attachments": true,
2384      "options": {}
2385    },
2386    {
2387      "id": "openai/o3-mini",
2388      "name": "o3-mini",
2389      "cost_per_1m_in": 1.1,
2390      "cost_per_1m_out": 4.4,
2391      "cost_per_1m_in_cached": 0.55,
2392      "cost_per_1m_out_cached": 0,
2393      "context_window": 200000,
2394      "default_max_tokens": 8000,
2395      "can_reason": true,
2396      "reasoning_levels": [
2397        "low",
2398        "medium",
2399        "high"
2400      ],
2401      "default_reasoning_effort": "medium",
2402      "supports_attachments": false,
2403      "options": {}
2404    },
2405    {
2406      "id": "openai/o4-mini",
2407      "name": "o4-mini",
2408      "cost_per_1m_in": 1.1,
2409      "cost_per_1m_out": 4.4,
2410      "cost_per_1m_in_cached": 0.275,
2411      "cost_per_1m_out_cached": 0,
2412      "context_window": 200000,
2413      "default_max_tokens": 8000,
2414      "can_reason": true,
2415      "reasoning_levels": [
2416        "low",
2417        "medium",
2418        "high"
2419      ],
2420      "default_reasoning_effort": "medium",
2421      "supports_attachments": true,
2422      "options": {}
2423    },
2424    {
2425      "id": "vercel/v0-1.0-md",
2426      "name": "v0-1.0-md",
2427      "cost_per_1m_in": 3,
2428      "cost_per_1m_out": 15,
2429      "cost_per_1m_in_cached": 0,
2430      "cost_per_1m_out_cached": 0,
2431      "context_window": 128000,
2432      "default_max_tokens": 8000,
2433      "can_reason": false,
2434      "supports_attachments": true,
2435      "options": {}
2436    },
2437    {
2438      "id": "vercel/v0-1.5-md",
2439      "name": "v0-1.5-md",
2440      "cost_per_1m_in": 3,
2441      "cost_per_1m_out": 15,
2442      "cost_per_1m_in_cached": 0,
2443      "cost_per_1m_out_cached": 0,
2444      "context_window": 128000,
2445      "default_max_tokens": 8000,
2446      "can_reason": false,
2447      "supports_attachments": true,
2448      "options": {}
2449    }
2450  ],
2451  "default_headers": {
2452    "HTTP-Referer": "https://charm.land",
2453    "X-Title": "Crush"
2454  }
2455}