vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "vercel",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true,
  21      "options": {}
  22    },
  23    {
  24      "id": "anthropic/claude-3.5-haiku",
  25      "name": "Claude 3.5 Haiku",
  26      "cost_per_1m_in": 0.8,
  27      "cost_per_1m_out": 4,
  28      "cost_per_1m_in_cached": 0.08,
  29      "cost_per_1m_out_cached": 1,
  30      "context_window": 200000,
  31      "default_max_tokens": 8000,
  32      "can_reason": false,
  33      "supports_attachments": true,
  34      "options": {}
  35    },
  36    {
  37      "id": "anthropic/claude-3.5-sonnet",
  38      "name": "Claude 3.5 Sonnet",
  39      "cost_per_1m_in": 3,
  40      "cost_per_1m_out": 15,
  41      "cost_per_1m_in_cached": 0.3,
  42      "cost_per_1m_out_cached": 3.75,
  43      "context_window": 200000,
  44      "default_max_tokens": 8000,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "anthropic/claude-3.5-sonnet-20240620",
  51      "name": "Claude 3.5 Sonnet (2024-06-20)",
  52      "cost_per_1m_in": 3,
  53      "cost_per_1m_out": 15,
  54      "cost_per_1m_in_cached": 0.3,
  55      "cost_per_1m_out_cached": 3.75,
  56      "context_window": 200000,
  57      "default_max_tokens": 8000,
  58      "can_reason": false,
  59      "supports_attachments": true,
  60      "options": {}
  61    },
  62    {
  63      "id": "anthropic/claude-3.7-sonnet",
  64      "name": "Claude 3.7 Sonnet",
  65      "cost_per_1m_in": 3,
  66      "cost_per_1m_out": 15,
  67      "cost_per_1m_in_cached": 0.3,
  68      "cost_per_1m_out_cached": 3.75,
  69      "context_window": 200000,
  70      "default_max_tokens": 8000,
  71      "can_reason": true,
  72      "reasoning_levels": [
  73        "none",
  74        "minimal",
  75        "low",
  76        "medium",
  77        "high",
  78        "xhigh"
  79      ],
  80      "default_reasoning_effort": "medium",
  81      "supports_attachments": true,
  82      "options": {}
  83    },
  84    {
  85      "id": "anthropic/claude-haiku-4.5",
  86      "name": "Claude Haiku 4.5",
  87      "cost_per_1m_in": 1,
  88      "cost_per_1m_out": 5,
  89      "cost_per_1m_in_cached": 0.1,
  90      "cost_per_1m_out_cached": 1.25,
  91      "context_window": 200000,
  92      "default_max_tokens": 8000,
  93      "can_reason": true,
  94      "reasoning_levels": [
  95        "none",
  96        "minimal",
  97        "low",
  98        "medium",
  99        "high",
 100        "xhigh"
 101      ],
 102      "default_reasoning_effort": "medium",
 103      "supports_attachments": true,
 104      "options": {}
 105    },
 106    {
 107      "id": "anthropic/claude-opus-4",
 108      "name": "Claude Opus 4",
 109      "cost_per_1m_in": 15,
 110      "cost_per_1m_out": 75,
 111      "cost_per_1m_in_cached": 1.5,
 112      "cost_per_1m_out_cached": 18.75,
 113      "context_window": 200000,
 114      "default_max_tokens": 8000,
 115      "can_reason": true,
 116      "reasoning_levels": [
 117        "none",
 118        "minimal",
 119        "low",
 120        "medium",
 121        "high",
 122        "xhigh"
 123      ],
 124      "default_reasoning_effort": "medium",
 125      "supports_attachments": true,
 126      "options": {}
 127    },
 128    {
 129      "id": "anthropic/claude-opus-4.1",
 130      "name": "Claude Opus 4.1",
 131      "cost_per_1m_in": 15,
 132      "cost_per_1m_out": 75,
 133      "cost_per_1m_in_cached": 1.5,
 134      "cost_per_1m_out_cached": 18.75,
 135      "context_window": 200000,
 136      "default_max_tokens": 8000,
 137      "can_reason": true,
 138      "reasoning_levels": [
 139        "none",
 140        "minimal",
 141        "low",
 142        "medium",
 143        "high",
 144        "xhigh"
 145      ],
 146      "default_reasoning_effort": "medium",
 147      "supports_attachments": true,
 148      "options": {}
 149    },
 150    {
 151      "id": "anthropic/claude-opus-4.5",
 152      "name": "Claude Opus 4.5",
 153      "cost_per_1m_in": 5,
 154      "cost_per_1m_out": 25,
 155      "cost_per_1m_in_cached": 0.5,
 156      "cost_per_1m_out_cached": 6.25,
 157      "context_window": 200000,
 158      "default_max_tokens": 8000,
 159      "can_reason": true,
 160      "reasoning_levels": [
 161        "none",
 162        "minimal",
 163        "low",
 164        "medium",
 165        "high",
 166        "xhigh"
 167      ],
 168      "default_reasoning_effort": "medium",
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-opus-4.6",
 174      "name": "Claude Opus 4.6",
 175      "cost_per_1m_in": 5,
 176      "cost_per_1m_out": 25,
 177      "cost_per_1m_in_cached": 0.5,
 178      "cost_per_1m_out_cached": 6.25,
 179      "context_window": 1000000,
 180      "default_max_tokens": 8000,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "none",
 184        "minimal",
 185        "low",
 186        "medium",
 187        "high",
 188        "xhigh"
 189      ],
 190      "default_reasoning_effort": "medium",
 191      "supports_attachments": true,
 192      "options": {}
 193    },
 194    {
 195      "id": "anthropic/claude-sonnet-4",
 196      "name": "Claude Sonnet 4",
 197      "cost_per_1m_in": 3,
 198      "cost_per_1m_out": 15,
 199      "cost_per_1m_in_cached": 0.3,
 200      "cost_per_1m_out_cached": 3.75,
 201      "context_window": 1000000,
 202      "default_max_tokens": 8000,
 203      "can_reason": true,
 204      "reasoning_levels": [
 205        "none",
 206        "minimal",
 207        "low",
 208        "medium",
 209        "high",
 210        "xhigh"
 211      ],
 212      "default_reasoning_effort": "medium",
 213      "supports_attachments": true,
 214      "options": {}
 215    },
 216    {
 217      "id": "anthropic/claude-sonnet-4.5",
 218      "name": "Claude Sonnet 4.5",
 219      "cost_per_1m_in": 3,
 220      "cost_per_1m_out": 15,
 221      "cost_per_1m_in_cached": 0.3,
 222      "cost_per_1m_out_cached": 3.75,
 223      "context_window": 1000000,
 224      "default_max_tokens": 8000,
 225      "can_reason": true,
 226      "reasoning_levels": [
 227        "none",
 228        "minimal",
 229        "low",
 230        "medium",
 231        "high",
 232        "xhigh"
 233      ],
 234      "default_reasoning_effort": "medium",
 235      "supports_attachments": true,
 236      "options": {}
 237    },
 238    {
 239      "id": "anthropic/claude-sonnet-4.6",
 240      "name": "Claude Sonnet 4.6",
 241      "cost_per_1m_in": 3,
 242      "cost_per_1m_out": 15,
 243      "cost_per_1m_in_cached": 0.3,
 244      "cost_per_1m_out_cached": 3.75,
 245      "context_window": 1000000,
 246      "default_max_tokens": 8000,
 247      "can_reason": true,
 248      "reasoning_levels": [
 249        "none",
 250        "minimal",
 251        "low",
 252        "medium",
 253        "high",
 254        "xhigh"
 255      ],
 256      "default_reasoning_effort": "medium",
 257      "supports_attachments": true,
 258      "options": {}
 259    },
 260    {
 261      "id": "cohere/command-a",
 262      "name": "Command A",
 263      "cost_per_1m_in": 2.5,
 264      "cost_per_1m_out": 10,
 265      "cost_per_1m_in_cached": 0,
 266      "cost_per_1m_out_cached": 0,
 267      "context_window": 256000,
 268      "default_max_tokens": 8000,
 269      "can_reason": false,
 270      "supports_attachments": false,
 271      "options": {}
 272    },
 273    {
 274      "id": "deepseek/deepseek-v3",
 275      "name": "DeepSeek V3 0324",
 276      "cost_per_1m_in": 0.77,
 277      "cost_per_1m_out": 0.77,
 278      "cost_per_1m_in_cached": 0,
 279      "cost_per_1m_out_cached": 0,
 280      "context_window": 163840,
 281      "default_max_tokens": 8000,
 282      "can_reason": false,
 283      "supports_attachments": false,
 284      "options": {}
 285    },
 286    {
 287      "id": "deepseek/deepseek-v3.1-terminus",
 288      "name": "DeepSeek V3.1 Terminus",
 289      "cost_per_1m_in": 0.27,
 290      "cost_per_1m_out": 1,
 291      "cost_per_1m_in_cached": 0.135,
 292      "cost_per_1m_out_cached": 0,
 293      "context_window": 131072,
 294      "default_max_tokens": 8000,
 295      "can_reason": true,
 296      "reasoning_levels": [
 297        "low",
 298        "medium",
 299        "high"
 300      ],
 301      "default_reasoning_effort": "medium",
 302      "supports_attachments": false,
 303      "options": {}
 304    },
 305    {
 306      "id": "deepseek/deepseek-v3.2",
 307      "name": "DeepSeek V3.2",
 308      "cost_per_1m_in": 0.28,
 309      "cost_per_1m_out": 0.42,
 310      "cost_per_1m_in_cached": 0.028,
 311      "cost_per_1m_out_cached": 0,
 312      "context_window": 128000,
 313      "default_max_tokens": 8000,
 314      "can_reason": false,
 315      "supports_attachments": false,
 316      "options": {}
 317    },
 318    {
 319      "id": "deepseek/deepseek-v3.2-thinking",
 320      "name": "DeepSeek V3.2 Thinking",
 321      "cost_per_1m_in": 0.28,
 322      "cost_per_1m_out": 0.42,
 323      "cost_per_1m_in_cached": 0.028,
 324      "cost_per_1m_out_cached": 0,
 325      "context_window": 128000,
 326      "default_max_tokens": 8000,
 327      "can_reason": true,
 328      "reasoning_levels": [
 329        "low",
 330        "medium",
 331        "high"
 332      ],
 333      "default_reasoning_effort": "medium",
 334      "supports_attachments": false,
 335      "options": {}
 336    },
 337    {
 338      "id": "deepseek/deepseek-r1",
 339      "name": "DeepSeek-R1",
 340      "cost_per_1m_in": 1.35,
 341      "cost_per_1m_out": 5.4,
 342      "cost_per_1m_in_cached": 0,
 343      "cost_per_1m_out_cached": 0,
 344      "context_window": 128000,
 345      "default_max_tokens": 8000,
 346      "can_reason": true,
 347      "reasoning_levels": [
 348        "low",
 349        "medium",
 350        "high"
 351      ],
 352      "default_reasoning_effort": "medium",
 353      "supports_attachments": false,
 354      "options": {}
 355    },
 356    {
 357      "id": "deepseek/deepseek-v3.1",
 358      "name": "DeepSeek-V3.1",
 359      "cost_per_1m_in": 0.56,
 360      "cost_per_1m_out": 1.68,
 361      "cost_per_1m_in_cached": 0.28,
 362      "cost_per_1m_out_cached": 0,
 363      "context_window": 163840,
 364      "default_max_tokens": 8000,
 365      "can_reason": true,
 366      "reasoning_levels": [
 367        "low",
 368        "medium",
 369        "high"
 370      ],
 371      "default_reasoning_effort": "medium",
 372      "supports_attachments": false,
 373      "options": {}
 374    },
 375    {
 376      "id": "mistral/devstral-2",
 377      "name": "Devstral 2",
 378      "cost_per_1m_in": 0.4,
 379      "cost_per_1m_out": 2,
 380      "cost_per_1m_in_cached": 0,
 381      "cost_per_1m_out_cached": 0,
 382      "context_window": 256000,
 383      "default_max_tokens": 8000,
 384      "can_reason": false,
 385      "supports_attachments": false,
 386      "options": {}
 387    },
 388    {
 389      "id": "mistral/devstral-small",
 390      "name": "Devstral Small 1.1",
 391      "cost_per_1m_in": 0.1,
 392      "cost_per_1m_out": 0.3,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 128000,
 396      "default_max_tokens": 8000,
 397      "can_reason": false,
 398      "supports_attachments": false,
 399      "options": {}
 400    },
 401    {
 402      "id": "mistral/devstral-small-2",
 403      "name": "Devstral Small 2",
 404      "cost_per_1m_in": 0.1,
 405      "cost_per_1m_out": 0.3,
 406      "cost_per_1m_in_cached": 0,
 407      "cost_per_1m_out_cached": 0,
 408      "context_window": 256000,
 409      "default_max_tokens": 8000,
 410      "can_reason": false,
 411      "supports_attachments": false,
 412      "options": {}
 413    },
 414    {
 415      "id": "zai/glm-4.5-air",
 416      "name": "GLM 4.5 Air",
 417      "cost_per_1m_in": 0.2,
 418      "cost_per_1m_out": 1.1,
 419      "cost_per_1m_in_cached": 0.03,
 420      "cost_per_1m_out_cached": 0,
 421      "context_window": 128000,
 422      "default_max_tokens": 8000,
 423      "can_reason": true,
 424      "reasoning_levels": [
 425        "low",
 426        "medium",
 427        "high"
 428      ],
 429      "default_reasoning_effort": "medium",
 430      "supports_attachments": false,
 431      "options": {}
 432    },
 433    {
 434      "id": "zai/glm-4.5v",
 435      "name": "GLM 4.5V",
 436      "cost_per_1m_in": 0.6,
 437      "cost_per_1m_out": 1.8,
 438      "cost_per_1m_in_cached": 0.11,
 439      "cost_per_1m_out_cached": 0,
 440      "context_window": 66000,
 441      "default_max_tokens": 8000,
 442      "can_reason": false,
 443      "supports_attachments": true,
 444      "options": {}
 445    },
 446    {
 447      "id": "zai/glm-4.6",
 448      "name": "GLM 4.6",
 449      "cost_per_1m_in": 0.6,
 450      "cost_per_1m_out": 2.2,
 451      "cost_per_1m_in_cached": 0.11,
 452      "cost_per_1m_out_cached": 0,
 453      "context_window": 200000,
 454      "default_max_tokens": 8000,
 455      "can_reason": true,
 456      "reasoning_levels": [
 457        "low",
 458        "medium",
 459        "high"
 460      ],
 461      "default_reasoning_effort": "medium",
 462      "supports_attachments": false,
 463      "options": {}
 464    },
 465    {
 466      "id": "zai/glm-4.7",
 467      "name": "GLM 4.7",
 468      "cost_per_1m_in": 0.6,
 469      "cost_per_1m_out": 2.2,
 470      "cost_per_1m_in_cached": 0.11,
 471      "cost_per_1m_out_cached": 0,
 472      "context_window": 200000,
 473      "default_max_tokens": 8000,
 474      "can_reason": true,
 475      "reasoning_levels": [
 476        "low",
 477        "medium",
 478        "high"
 479      ],
 480      "default_reasoning_effort": "medium",
 481      "supports_attachments": false,
 482      "options": {}
 483    },
 484    {
 485      "id": "zai/glm-4.7-flash",
 486      "name": "GLM 4.7 Flash",
 487      "cost_per_1m_in": 0.07,
 488      "cost_per_1m_out": 0.4,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 200000,
 492      "default_max_tokens": 8000,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": false,
 501      "options": {}
 502    },
 503    {
 504      "id": "zai/glm-4.7-flashx",
 505      "name": "GLM 4.7 FlashX",
 506      "cost_per_1m_in": 0.06,
 507      "cost_per_1m_out": 0.4,
 508      "cost_per_1m_in_cached": 0.01,
 509      "cost_per_1m_out_cached": 0,
 510      "context_window": 200000,
 511      "default_max_tokens": 8000,
 512      "can_reason": true,
 513      "reasoning_levels": [
 514        "low",
 515        "medium",
 516        "high"
 517      ],
 518      "default_reasoning_effort": "medium",
 519      "supports_attachments": false,
 520      "options": {}
 521    },
 522    {
 523      "id": "zai/glm-5",
 524      "name": "GLM 5",
 525      "cost_per_1m_in": 1,
 526      "cost_per_1m_out": 3.2,
 527      "cost_per_1m_in_cached": 0.2,
 528      "cost_per_1m_out_cached": 0,
 529      "context_window": 202800,
 530      "default_max_tokens": 8000,
 531      "can_reason": true,
 532      "reasoning_levels": [
 533        "low",
 534        "medium",
 535        "high"
 536      ],
 537      "default_reasoning_effort": "medium",
 538      "supports_attachments": false,
 539      "options": {}
 540    },
 541    {
 542      "id": "zai/glm-5-turbo",
 543      "name": "GLM 5 Turbo",
 544      "cost_per_1m_in": 1.2,
 545      "cost_per_1m_out": 4,
 546      "cost_per_1m_in_cached": 0.24,
 547      "cost_per_1m_out_cached": 0,
 548      "context_window": 202800,
 549      "default_max_tokens": 8000,
 550      "can_reason": true,
 551      "reasoning_levels": [
 552        "low",
 553        "medium",
 554        "high"
 555      ],
 556      "default_reasoning_effort": "medium",
 557      "supports_attachments": false,
 558      "options": {}
 559    },
 560    {
 561      "id": "zai/glm-4.5",
 562      "name": "GLM-4.5",
 563      "cost_per_1m_in": 0.6,
 564      "cost_per_1m_out": 2.2,
 565      "cost_per_1m_in_cached": 0.11,
 566      "cost_per_1m_out_cached": 0,
 567      "context_window": 128000,
 568      "default_max_tokens": 8000,
 569      "can_reason": true,
 570      "reasoning_levels": [
 571        "low",
 572        "medium",
 573        "high"
 574      ],
 575      "default_reasoning_effort": "medium",
 576      "supports_attachments": false,
 577      "options": {}
 578    },
 579    {
 580      "id": "zai/glm-4.6v",
 581      "name": "GLM-4.6V",
 582      "cost_per_1m_in": 0.3,
 583      "cost_per_1m_out": 0.9,
 584      "cost_per_1m_in_cached": 0.05,
 585      "cost_per_1m_out_cached": 0,
 586      "context_window": 128000,
 587      "default_max_tokens": 8000,
 588      "can_reason": true,
 589      "reasoning_levels": [
 590        "low",
 591        "medium",
 592        "high"
 593      ],
 594      "default_reasoning_effort": "medium",
 595      "supports_attachments": true,
 596      "options": {}
 597    },
 598    {
 599      "id": "zai/glm-4.6v-flash",
 600      "name": "GLM-4.6V-Flash",
 601      "cost_per_1m_in": 0,
 602      "cost_per_1m_out": 0,
 603      "cost_per_1m_in_cached": 0,
 604      "cost_per_1m_out_cached": 0,
 605      "context_window": 128000,
 606      "default_max_tokens": 8000,
 607      "can_reason": true,
 608      "reasoning_levels": [
 609        "low",
 610        "medium",
 611        "high"
 612      ],
 613      "default_reasoning_effort": "medium",
 614      "supports_attachments": true,
 615      "options": {}
 616    },
 617    {
 618      "id": "openai/gpt-5-chat",
 619      "name": "GPT 5 Chat",
 620      "cost_per_1m_in": 1.25,
 621      "cost_per_1m_out": 10,
 622      "cost_per_1m_in_cached": 0.125,
 623      "cost_per_1m_out_cached": 0,
 624      "context_window": 128000,
 625      "default_max_tokens": 8000,
 626      "can_reason": true,
 627      "reasoning_levels": [
 628        "low",
 629        "medium",
 630        "high"
 631      ],
 632      "default_reasoning_effort": "medium",
 633      "supports_attachments": true,
 634      "options": {}
 635    },
 636    {
 637      "id": "openai/gpt-5.1-codex-max",
 638      "name": "GPT 5.1 Codex Max",
 639      "cost_per_1m_in": 1.25,
 640      "cost_per_1m_out": 10,
 641      "cost_per_1m_in_cached": 0.125,
 642      "cost_per_1m_out_cached": 0,
 643      "context_window": 400000,
 644      "default_max_tokens": 8000,
 645      "can_reason": true,
 646      "reasoning_levels": [
 647        "low",
 648        "medium",
 649        "high"
 650      ],
 651      "default_reasoning_effort": "medium",
 652      "supports_attachments": true,
 653      "options": {}
 654    },
 655    {
 656      "id": "openai/gpt-5.1-codex-mini",
 657      "name": "GPT 5.1 Codex Mini",
 658      "cost_per_1m_in": 0.25,
 659      "cost_per_1m_out": 2,
 660      "cost_per_1m_in_cached": 0.025,
 661      "cost_per_1m_out_cached": 0,
 662      "context_window": 400000,
 663      "default_max_tokens": 8000,
 664      "can_reason": true,
 665      "reasoning_levels": [
 666        "low",
 667        "medium",
 668        "high"
 669      ],
 670      "default_reasoning_effort": "medium",
 671      "supports_attachments": true,
 672      "options": {}
 673    },
 674    {
 675      "id": "openai/gpt-5.1-thinking",
 676      "name": "GPT 5.1 Thinking",
 677      "cost_per_1m_in": 1.25,
 678      "cost_per_1m_out": 10,
 679      "cost_per_1m_in_cached": 0.125,
 680      "cost_per_1m_out_cached": 0,
 681      "context_window": 400000,
 682      "default_max_tokens": 8000,
 683      "can_reason": true,
 684      "reasoning_levels": [
 685        "low",
 686        "medium",
 687        "high"
 688      ],
 689      "default_reasoning_effort": "medium",
 690      "supports_attachments": true,
 691      "options": {}
 692    },
 693    {
 694      "id": "openai/gpt-5.2",
 695      "name": "GPT 5.2",
 696      "cost_per_1m_in": 1.75,
 697      "cost_per_1m_out": 14,
 698      "cost_per_1m_in_cached": 0.175,
 699      "cost_per_1m_out_cached": 0,
 700      "context_window": 400000,
 701      "default_max_tokens": 8000,
 702      "can_reason": true,
 703      "reasoning_levels": [
 704        "low",
 705        "medium",
 706        "high"
 707      ],
 708      "default_reasoning_effort": "medium",
 709      "supports_attachments": true,
 710      "options": {}
 711    },
 712    {
 713      "id": "openai/gpt-5.2-pro",
  714      "name": "GPT 5.2 Pro",
 715      "cost_per_1m_in": 21,
 716      "cost_per_1m_out": 168,
 717      "cost_per_1m_in_cached": 0,
 718      "cost_per_1m_out_cached": 0,
 719      "context_window": 400000,
 720      "default_max_tokens": 8000,
 721      "can_reason": true,
 722      "reasoning_levels": [
 723        "low",
 724        "medium",
 725        "high"
 726      ],
 727      "default_reasoning_effort": "medium",
 728      "supports_attachments": true,
 729      "options": {}
 730    },
 731    {
 732      "id": "openai/gpt-5.2-chat",
 733      "name": "GPT 5.2 Chat",
 734      "cost_per_1m_in": 1.75,
 735      "cost_per_1m_out": 14,
 736      "cost_per_1m_in_cached": 0.175,
 737      "cost_per_1m_out_cached": 0,
 738      "context_window": 128000,
 739      "default_max_tokens": 8000,
 740      "can_reason": true,
 741      "reasoning_levels": [
 742        "low",
 743        "medium",
 744        "high"
 745      ],
 746      "default_reasoning_effort": "medium",
 747      "supports_attachments": true,
 748      "options": {}
 749    },
 750    {
 751      "id": "openai/gpt-5.2-codex",
 752      "name": "GPT 5.2 Codex",
 753      "cost_per_1m_in": 1.75,
 754      "cost_per_1m_out": 14,
 755      "cost_per_1m_in_cached": 0.175,
 756      "cost_per_1m_out_cached": 0,
 757      "context_window": 400000,
 758      "default_max_tokens": 8000,
 759      "can_reason": true,
 760      "reasoning_levels": [
 761        "low",
 762        "medium",
 763        "high"
 764      ],
 765      "default_reasoning_effort": "medium",
 766      "supports_attachments": true,
 767      "options": {}
 768    },
 769    {
 770      "id": "openai/gpt-5.3-codex",
 771      "name": "GPT 5.3 Codex",
 772      "cost_per_1m_in": 1.75,
 773      "cost_per_1m_out": 14,
 774      "cost_per_1m_in_cached": 0.175,
 775      "cost_per_1m_out_cached": 0,
 776      "context_window": 400000,
 777      "default_max_tokens": 8000,
 778      "can_reason": true,
 779      "reasoning_levels": [
 780        "low",
 781        "medium",
 782        "high"
 783      ],
 784      "default_reasoning_effort": "medium",
 785      "supports_attachments": true,
 786      "options": {}
 787    },
 788    {
 789      "id": "openai/gpt-5.4",
 790      "name": "GPT 5.4",
 791      "cost_per_1m_in": 2.5,
 792      "cost_per_1m_out": 15,
 793      "cost_per_1m_in_cached": 0.25,
 794      "cost_per_1m_out_cached": 0,
 795      "context_window": 1050000,
 796      "default_max_tokens": 8000,
 797      "can_reason": true,
 798      "reasoning_levels": [
 799        "low",
 800        "medium",
 801        "high"
 802      ],
 803      "default_reasoning_effort": "medium",
 804      "supports_attachments": true,
 805      "options": {}
 806    },
 807    {
 808      "id": "openai/gpt-5.4-mini",
 809      "name": "GPT 5.4 Mini",
 810      "cost_per_1m_in": 0.75,
 811      "cost_per_1m_out": 4.5,
 812      "cost_per_1m_in_cached": 0.075,
 813      "cost_per_1m_out_cached": 0,
 814      "context_window": 400000,
 815      "default_max_tokens": 8000,
 816      "can_reason": true,
 817      "reasoning_levels": [
 818        "low",
 819        "medium",
 820        "high"
 821      ],
 822      "default_reasoning_effort": "medium",
 823      "supports_attachments": true,
 824      "options": {}
 825    },
 826    {
 827      "id": "openai/gpt-5.4-nano",
 828      "name": "GPT 5.4 Nano",
 829      "cost_per_1m_in": 0.2,
 830      "cost_per_1m_out": 1.25,
 831      "cost_per_1m_in_cached": 0.02,
 832      "cost_per_1m_out_cached": 0,
 833      "context_window": 400000,
 834      "default_max_tokens": 8000,
 835      "can_reason": true,
 836      "reasoning_levels": [
 837        "low",
 838        "medium",
 839        "high"
 840      ],
 841      "default_reasoning_effort": "medium",
 842      "supports_attachments": true,
 843      "options": {}
 844    },
 845    {
 846      "id": "openai/gpt-5.4-pro",
 847      "name": "GPT 5.4 Pro",
 848      "cost_per_1m_in": 30,
 849      "cost_per_1m_out": 180,
 850      "cost_per_1m_in_cached": 0,
 851      "cost_per_1m_out_cached": 0,
 852      "context_window": 1050000,
 853      "default_max_tokens": 8000,
 854      "can_reason": true,
 855      "reasoning_levels": [
 856        "low",
 857        "medium",
 858        "high"
 859      ],
 860      "default_reasoning_effort": "medium",
 861      "supports_attachments": true,
 862      "options": {}
 863    },
 864    {
 865      "id": "openai/gpt-4-turbo",
 866      "name": "GPT-4 Turbo",
 867      "cost_per_1m_in": 10,
 868      "cost_per_1m_out": 30,
 869      "cost_per_1m_in_cached": 0,
 870      "cost_per_1m_out_cached": 0,
 871      "context_window": 128000,
 872      "default_max_tokens": 4096,
 873      "can_reason": false,
 874      "supports_attachments": true,
 875      "options": {}
 876    },
 877    {
 878      "id": "openai/gpt-4.1",
 879      "name": "GPT-4.1",
 880      "cost_per_1m_in": 2,
 881      "cost_per_1m_out": 8,
 882      "cost_per_1m_in_cached": 0.5,
 883      "cost_per_1m_out_cached": 0,
 884      "context_window": 1047576,
 885      "default_max_tokens": 8000,
 886      "can_reason": false,
 887      "supports_attachments": true,
 888      "options": {}
 889    },
 890    {
 891      "id": "openai/gpt-4.1-mini",
 892      "name": "GPT-4.1 mini",
 893      "cost_per_1m_in": 0.4,
 894      "cost_per_1m_out": 1.6,
 895      "cost_per_1m_in_cached": 0.1,
 896      "cost_per_1m_out_cached": 0,
 897      "context_window": 1047576,
 898      "default_max_tokens": 8000,
 899      "can_reason": false,
 900      "supports_attachments": true,
 901      "options": {}
 902    },
 903    {
 904      "id": "openai/gpt-4.1-nano",
 905      "name": "GPT-4.1 nano",
 906      "cost_per_1m_in": 0.1,
 907      "cost_per_1m_out": 0.4,
 908      "cost_per_1m_in_cached": 0.025,
 909      "cost_per_1m_out_cached": 0,
 910      "context_window": 1047576,
 911      "default_max_tokens": 8000,
 912      "can_reason": false,
 913      "supports_attachments": true,
 914      "options": {}
 915    },
 916    {
 917      "id": "openai/gpt-4o",
 918      "name": "GPT-4o",
 919      "cost_per_1m_in": 2.5,
 920      "cost_per_1m_out": 10,
 921      "cost_per_1m_in_cached": 1.25,
 922      "cost_per_1m_out_cached": 0,
 923      "context_window": 128000,
 924      "default_max_tokens": 8000,
 925      "can_reason": false,
 926      "supports_attachments": true,
 927      "options": {}
 928    },
 929    {
 930      "id": "openai/gpt-4o-mini",
 931      "name": "GPT-4o mini",
 932      "cost_per_1m_in": 0.15,
 933      "cost_per_1m_out": 0.6,
 934      "cost_per_1m_in_cached": 0.075,
 935      "cost_per_1m_out_cached": 0,
 936      "context_window": 128000,
 937      "default_max_tokens": 8000,
 938      "can_reason": false,
 939      "supports_attachments": true,
 940      "options": {}
 941    },
 942    {
 943      "id": "openai/gpt-5",
 944      "name": "GPT-5",
 945      "cost_per_1m_in": 1.25,
 946      "cost_per_1m_out": 10,
 947      "cost_per_1m_in_cached": 0.125,
 948      "cost_per_1m_out_cached": 0,
 949      "context_window": 400000,
 950      "default_max_tokens": 8000,
 951      "can_reason": true,
 952      "reasoning_levels": [
 953        "low",
 954        "medium",
 955        "high"
 956      ],
 957      "default_reasoning_effort": "medium",
 958      "supports_attachments": true,
 959      "options": {}
 960    },
 961    {
 962      "id": "openai/gpt-5-mini",
 963      "name": "GPT-5 mini",
 964      "cost_per_1m_in": 0.25,
 965      "cost_per_1m_out": 2,
 966      "cost_per_1m_in_cached": 0.025,
 967      "cost_per_1m_out_cached": 0,
 968      "context_window": 400000,
 969      "default_max_tokens": 8000,
 970      "can_reason": true,
 971      "reasoning_levels": [
 972        "low",
 973        "medium",
 974        "high"
 975      ],
 976      "default_reasoning_effort": "medium",
 977      "supports_attachments": true,
 978      "options": {}
 979    },
 980    {
 981      "id": "openai/gpt-5-nano",
 982      "name": "GPT-5 nano",
 983      "cost_per_1m_in": 0.05,
 984      "cost_per_1m_out": 0.4,
 985      "cost_per_1m_in_cached": 0.005,
 986      "cost_per_1m_out_cached": 0,
 987      "context_window": 400000,
 988      "default_max_tokens": 8000,
 989      "can_reason": true,
 990      "reasoning_levels": [
 991        "low",
 992        "medium",
 993        "high"
 994      ],
 995      "default_reasoning_effort": "medium",
 996      "supports_attachments": true,
 997      "options": {}
 998    },
 999    {
1000      "id": "openai/gpt-5-pro",
1001      "name": "GPT-5 pro",
1002      "cost_per_1m_in": 15,
1003      "cost_per_1m_out": 120,
1004      "cost_per_1m_in_cached": 0,
1005      "cost_per_1m_out_cached": 0,
1006      "context_window": 400000,
1007      "default_max_tokens": 8000,
1008      "can_reason": true,
1009      "reasoning_levels": [
1010        "low",
1011        "medium",
1012        "high"
1013      ],
1014      "default_reasoning_effort": "medium",
1015      "supports_attachments": true,
1016      "options": {}
1017    },
1018    {
1019      "id": "openai/gpt-5-codex",
1020      "name": "GPT-5-Codex",
1021      "cost_per_1m_in": 1.25,
1022      "cost_per_1m_out": 10,
1023      "cost_per_1m_in_cached": 0.125,
1024      "cost_per_1m_out_cached": 0,
1025      "context_window": 400000,
1026      "default_max_tokens": 8000,
1027      "can_reason": true,
1028      "reasoning_levels": [
1029        "low",
1030        "medium",
1031        "high"
1032      ],
1033      "default_reasoning_effort": "medium",
1034      "supports_attachments": false,
1035      "options": {}
1036    },
1037    {
1038      "id": "openai/gpt-5.1-instant",
1039      "name": "GPT-5.1 Instant",
1040      "cost_per_1m_in": 1.25,
1041      "cost_per_1m_out": 10,
1042      "cost_per_1m_in_cached": 0.125,
1043      "cost_per_1m_out_cached": 0,
1044      "context_window": 128000,
1045      "default_max_tokens": 8000,
1046      "can_reason": true,
1047      "reasoning_levels": [
1048        "low",
1049        "medium",
1050        "high"
1051      ],
1052      "default_reasoning_effort": "medium",
1053      "supports_attachments": true,
1054      "options": {}
1055    },
1056    {
1057      "id": "openai/gpt-5.1-codex",
1058      "name": "GPT-5.1-Codex",
1059      "cost_per_1m_in": 1.25,
1060      "cost_per_1m_out": 10,
1061      "cost_per_1m_in_cached": 0.125,
1062      "cost_per_1m_out_cached": 0,
1063      "context_window": 400000,
1064      "default_max_tokens": 8000,
1065      "can_reason": true,
1066      "reasoning_levels": [
1067        "low",
1068        "medium",
1069        "high"
1070      ],
1071      "default_reasoning_effort": "medium",
1072      "supports_attachments": true,
1073      "options": {}
1074    },
1075    {
1076      "id": "openai/gpt-5.3-chat",
1077      "name": "GPT-5.3 Chat",
1078      "cost_per_1m_in": 1.75,
1079      "cost_per_1m_out": 14,
1080      "cost_per_1m_in_cached": 0.175,
1081      "cost_per_1m_out_cached": 0,
1082      "context_window": 128000,
1083      "default_max_tokens": 8000,
1084      "can_reason": true,
1085      "reasoning_levels": [
1086        "low",
1087        "medium",
1088        "high"
1089      ],
1090      "default_reasoning_effort": "medium",
1091      "supports_attachments": true,
1092      "options": {}
1093    },
1094    {
1095      "id": "google/gemini-2.0-flash",
1096      "name": "Gemini 2.0 Flash",
1097      "cost_per_1m_in": 0.15,
1098      "cost_per_1m_out": 0.6,
1099      "cost_per_1m_in_cached": 0.025,
1100      "cost_per_1m_out_cached": 0,
1101      "context_window": 1048576,
1102      "default_max_tokens": 8000,
1103      "can_reason": false,
1104      "supports_attachments": true,
1105      "options": {}
1106    },
1107    {
1108      "id": "google/gemini-2.0-flash-lite",
1109      "name": "Gemini 2.0 Flash Lite",
1110      "cost_per_1m_in": 0.075,
1111      "cost_per_1m_out": 0.3,
1112      "cost_per_1m_in_cached": 0.02,
1113      "cost_per_1m_out_cached": 0,
1114      "context_window": 1048576,
1115      "default_max_tokens": 8000,
1116      "can_reason": false,
1117      "supports_attachments": true,
1118      "options": {}
1119    },
1120    {
1121      "id": "google/gemini-2.5-flash",
1122      "name": "Gemini 2.5 Flash",
1123      "cost_per_1m_in": 0.3,
1124      "cost_per_1m_out": 2.5,
1125      "cost_per_1m_in_cached": 0.03,
1126      "cost_per_1m_out_cached": 0,
1127      "context_window": 1000000,
1128      "default_max_tokens": 8000,
1129      "can_reason": true,
1130      "reasoning_levels": [
1131        "low",
1132        "medium",
1133        "high"
1134      ],
1135      "default_reasoning_effort": "medium",
1136      "supports_attachments": true,
1137      "options": {}
1138    },
1139    {
1140      "id": "google/gemini-2.5-flash-lite",
1141      "name": "Gemini 2.5 Flash Lite",
1142      "cost_per_1m_in": 0.1,
1143      "cost_per_1m_out": 0.4,
1144      "cost_per_1m_in_cached": 0.01,
1145      "cost_per_1m_out_cached": 0,
1146      "context_window": 1048576,
1147      "default_max_tokens": 8000,
1148      "can_reason": true,
1149      "reasoning_levels": [
1150        "low",
1151        "medium",
1152        "high"
1153      ],
1154      "default_reasoning_effort": "medium",
1155      "supports_attachments": true,
1156      "options": {}
1157    },
1158    {
1159      "id": "google/gemini-2.5-pro",
1160      "name": "Gemini 2.5 Pro",
1161      "cost_per_1m_in": 1.25,
1162      "cost_per_1m_out": 10,
1163      "cost_per_1m_in_cached": 0.125,
1164      "cost_per_1m_out_cached": 0,
1165      "context_window": 1048576,
1166      "default_max_tokens": 8000,
1167      "can_reason": true,
1168      "reasoning_levels": [
1169        "low",
1170        "medium",
1171        "high"
1172      ],
1173      "default_reasoning_effort": "medium",
1174      "supports_attachments": true,
1175      "options": {}
1176    },
1177    {
1178      "id": "google/gemini-3-flash",
1179      "name": "Gemini 3 Flash",
1180      "cost_per_1m_in": 0.5,
1181      "cost_per_1m_out": 3,
1182      "cost_per_1m_in_cached": 0.05,
1183      "cost_per_1m_out_cached": 0,
1184      "context_window": 1000000,
1185      "default_max_tokens": 8000,
1186      "can_reason": true,
1187      "reasoning_levels": [
1188        "low",
1189        "medium",
1190        "high"
1191      ],
1192      "default_reasoning_effort": "medium",
1193      "supports_attachments": true,
1194      "options": {}
1195    },
1196    {
1197      "id": "google/gemini-3-pro-preview",
1198      "name": "Gemini 3 Pro Preview",
1199      "cost_per_1m_in": 2,
1200      "cost_per_1m_out": 12,
1201      "cost_per_1m_in_cached": 0.2,
1202      "cost_per_1m_out_cached": 0,
1203      "context_window": 1000000,
1204      "default_max_tokens": 8000,
1205      "can_reason": true,
1206      "reasoning_levels": [
1207        "low",
1208        "medium",
1209        "high"
1210      ],
1211      "default_reasoning_effort": "medium",
1212      "supports_attachments": true,
1213      "options": {}
1214    },
1215    {
1216      "id": "google/gemini-3.1-flash-lite-preview",
1217      "name": "Gemini 3.1 Flash Lite Preview",
1218      "cost_per_1m_in": 0.25,
1219      "cost_per_1m_out": 1.5,
1220      "cost_per_1m_in_cached": 0,
1221      "cost_per_1m_out_cached": 0,
1222      "context_window": 1000000,
1223      "default_max_tokens": 8000,
1224      "can_reason": true,
1225      "reasoning_levels": [
1226        "low",
1227        "medium",
1228        "high"
1229      ],
1230      "default_reasoning_effort": "medium",
1231      "supports_attachments": true,
1232      "options": {}
1233    },
1234    {
1235      "id": "google/gemini-3.1-pro-preview",
1236      "name": "Gemini 3.1 Pro Preview",
1237      "cost_per_1m_in": 2,
1238      "cost_per_1m_out": 12,
1239      "cost_per_1m_in_cached": 0.2,
1240      "cost_per_1m_out_cached": 0,
1241      "context_window": 1000000,
1242      "default_max_tokens": 8000,
1243      "can_reason": true,
1244      "reasoning_levels": [
1245        "low",
1246        "medium",
1247        "high"
1248      ],
1249      "default_reasoning_effort": "medium",
1250      "supports_attachments": true,
1251      "options": {}
1252    },
1253    {
1254      "id": "xai/grok-2-vision",
1255      "name": "Grok 2 Vision",
1256      "cost_per_1m_in": 2,
1257      "cost_per_1m_out": 10,
1258      "cost_per_1m_in_cached": 0,
1259      "cost_per_1m_out_cached": 0,
1260      "context_window": 32768,
1261      "default_max_tokens": 8000,
1262      "can_reason": false,
1263      "supports_attachments": true,
1264      "options": {}
1265    },
1266    {
1267      "id": "xai/grok-3",
1268      "name": "Grok 3 Beta",
1269      "cost_per_1m_in": 3,
1270      "cost_per_1m_out": 15,
1271      "cost_per_1m_in_cached": 0.75,
1272      "cost_per_1m_out_cached": 0,
1273      "context_window": 131072,
1274      "default_max_tokens": 8000,
1275      "can_reason": false,
1276      "supports_attachments": false,
1277      "options": {}
1278    },
1279    {
1280      "id": "xai/grok-3-fast",
1281      "name": "Grok 3 Fast Beta",
1282      "cost_per_1m_in": 5,
1283      "cost_per_1m_out": 25,
1284      "cost_per_1m_in_cached": 1.25,
1285      "cost_per_1m_out_cached": 0,
1286      "context_window": 131072,
1287      "default_max_tokens": 8000,
1288      "can_reason": false,
1289      "supports_attachments": false,
1290      "options": {}
1291    },
1292    {
1293      "id": "xai/grok-3-mini",
1294      "name": "Grok 3 Mini Beta",
1295      "cost_per_1m_in": 0.3,
1296      "cost_per_1m_out": 0.5,
1297      "cost_per_1m_in_cached": 0.075,
1298      "cost_per_1m_out_cached": 0,
1299      "context_window": 131072,
1300      "default_max_tokens": 8000,
1301      "can_reason": false,
1302      "supports_attachments": false,
1303      "options": {}
1304    },
1305    {
1306      "id": "xai/grok-3-mini-fast",
1307      "name": "Grok 3 Mini Fast Beta",
1308      "cost_per_1m_in": 0.6,
1309      "cost_per_1m_out": 4,
1310      "cost_per_1m_in_cached": 0,
1311      "cost_per_1m_out_cached": 0,
1312      "context_window": 131072,
1313      "default_max_tokens": 8000,
1314      "can_reason": false,
1315      "supports_attachments": false,
1316      "options": {}
1317    },
1318    {
1319      "id": "xai/grok-4",
1320      "name": "Grok 4",
1321      "cost_per_1m_in": 3,
1322      "cost_per_1m_out": 15,
1323      "cost_per_1m_in_cached": 0.75,
1324      "cost_per_1m_out_cached": 0,
1325      "context_window": 256000,
1326      "default_max_tokens": 8000,
1327      "can_reason": true,
1328      "reasoning_levels": [
1329        "low",
1330        "medium",
1331        "high"
1332      ],
1333      "default_reasoning_effort": "medium",
1334      "supports_attachments": true,
1335      "options": {}
1336    },
1337    {
1338      "id": "xai/grok-4-fast-non-reasoning",
1339      "name": "Grok 4 Fast Non-Reasoning",
1340      "cost_per_1m_in": 0.2,
1341      "cost_per_1m_out": 0.5,
1342      "cost_per_1m_in_cached": 0.05,
1343      "cost_per_1m_out_cached": 0,
1344      "context_window": 2000000,
1345      "default_max_tokens": 8000,
1346      "can_reason": false,
1347      "supports_attachments": false,
1348      "options": {}
1349    },
1350    {
1351      "id": "xai/grok-4-fast-reasoning",
1352      "name": "Grok 4 Fast Reasoning",
1353      "cost_per_1m_in": 0.2,
1354      "cost_per_1m_out": 0.5,
1355      "cost_per_1m_in_cached": 0.05,
1356      "cost_per_1m_out_cached": 0,
1357      "context_window": 2000000,
1358      "default_max_tokens": 8000,
1359      "can_reason": true,
1360      "reasoning_levels": [
1361        "low",
1362        "medium",
1363        "high"
1364      ],
1365      "default_reasoning_effort": "medium",
1366      "supports_attachments": false,
1367      "options": {}
1368    },
1369    {
1370      "id": "xai/grok-4.1-fast-non-reasoning",
1371      "name": "Grok 4.1 Fast Non-Reasoning",
1372      "cost_per_1m_in": 0.2,
1373      "cost_per_1m_out": 0.5,
1374      "cost_per_1m_in_cached": 0.05,
1375      "cost_per_1m_out_cached": 0,
1376      "context_window": 2000000,
1377      "default_max_tokens": 8000,
1378      "can_reason": false,
1379      "supports_attachments": false,
1380      "options": {}
1381    },
1382    {
1383      "id": "xai/grok-4.1-fast-reasoning",
1384      "name": "Grok 4.1 Fast Reasoning",
1385      "cost_per_1m_in": 0.2,
1386      "cost_per_1m_out": 0.5,
1387      "cost_per_1m_in_cached": 0.05,
1388      "cost_per_1m_out_cached": 0,
1389      "context_window": 2000000,
1390      "default_max_tokens": 8000,
1391      "can_reason": true,
1392      "reasoning_levels": [
1393        "low",
1394        "medium",
1395        "high"
1396      ],
1397      "default_reasoning_effort": "medium",
1398      "supports_attachments": false,
1399      "options": {}
1400    },
1401    {
1402      "id": "xai/grok-4.20-non-reasoning-beta",
1403      "name": "Grok 4.20 Beta Non-Reasoning",
1404      "cost_per_1m_in": 2,
1405      "cost_per_1m_out": 6,
1406      "cost_per_1m_in_cached": 0.2,
1407      "cost_per_1m_out_cached": 0,
1408      "context_window": 2000000,
1409      "default_max_tokens": 8000,
1410      "can_reason": false,
1411      "supports_attachments": true,
1412      "options": {}
1413    },
1414    {
1415      "id": "xai/grok-4.20-reasoning-beta",
1416      "name": "Grok 4.20 Beta Reasoning",
1417      "cost_per_1m_in": 2,
1418      "cost_per_1m_out": 6,
1419      "cost_per_1m_in_cached": 0.2,
1420      "cost_per_1m_out_cached": 0,
1421      "context_window": 2000000,
1422      "default_max_tokens": 8000,
1423      "can_reason": true,
1424      "reasoning_levels": [
1425        "low",
1426        "medium",
1427        "high"
1428      ],
1429      "default_reasoning_effort": "medium",
1430      "supports_attachments": true,
1431      "options": {}
1432    },
1433    {
1434      "id": "xai/grok-4.20-multi-agent-beta",
1435      "name": "Grok 4.20 Multi Agent Beta",
1436      "cost_per_1m_in": 2,
1437      "cost_per_1m_out": 6,
1438      "cost_per_1m_in_cached": 0.2,
1439      "cost_per_1m_out_cached": 0,
1440      "context_window": 2000000,
1441      "default_max_tokens": 8000,
1442      "can_reason": true,
1443      "reasoning_levels": [
1444        "low",
1445        "medium",
1446        "high"
1447      ],
1448      "default_reasoning_effort": "medium",
1449      "supports_attachments": false,
1450      "options": {}
1451    },
1452    {
1453      "id": "xai/grok-4.20-multi-agent",
1454      "name": "Grok 4.20 Multi-Agent",
1455      "cost_per_1m_in": 2,
1456      "cost_per_1m_out": 6,
1457      "cost_per_1m_in_cached": 0.2,
1458      "cost_per_1m_out_cached": 0,
1459      "context_window": 2000000,
1460      "default_max_tokens": 8000,
1461      "can_reason": true,
1462      "reasoning_levels": [
1463        "low",
1464        "medium",
1465        "high"
1466      ],
1467      "default_reasoning_effort": "medium",
1468      "supports_attachments": false,
1469      "options": {}
1470    },
1471    {
1472      "id": "xai/grok-4.20-non-reasoning",
1473      "name": "Grok 4.20 Non-Reasoning",
1474      "cost_per_1m_in": 2,
1475      "cost_per_1m_out": 6,
1476      "cost_per_1m_in_cached": 0.2,
1477      "cost_per_1m_out_cached": 0,
1478      "context_window": 2000000,
1479      "default_max_tokens": 8000,
1480      "can_reason": false,
1481      "supports_attachments": true,
1482      "options": {}
1483    },
1484    {
1485      "id": "xai/grok-4.20-reasoning",
1486      "name": "Grok 4.20 Reasoning",
1487      "cost_per_1m_in": 2,
1488      "cost_per_1m_out": 6,
1489      "cost_per_1m_in_cached": 0.2,
1490      "cost_per_1m_out_cached": 0,
1491      "context_window": 2000000,
1492      "default_max_tokens": 8000,
1493      "can_reason": true,
1494      "reasoning_levels": [
1495        "low",
1496        "medium",
1497        "high"
1498      ],
1499      "default_reasoning_effort": "medium",
1500      "supports_attachments": true,
1501      "options": {}
1502    },
1503    {
1504      "id": "xai/grok-code-fast-1",
1505      "name": "Grok Code Fast 1",
1506      "cost_per_1m_in": 0.2,
1507      "cost_per_1m_out": 1.5,
1508      "cost_per_1m_in_cached": 0.02,
1509      "cost_per_1m_out_cached": 0,
1510      "context_window": 256000,
1511      "default_max_tokens": 8000,
1512      "can_reason": true,
1513      "reasoning_levels": [
1514        "low",
1515        "medium",
1516        "high"
1517      ],
1518      "default_reasoning_effort": "medium",
1519      "supports_attachments": false,
1520      "options": {}
1521    },
1522    {
1523      "id": "prime-intellect/intellect-3",
1524      "name": "INTELLECT 3",
1525      "cost_per_1m_in": 0.2,
1526      "cost_per_1m_out": 1.1,
1527      "cost_per_1m_in_cached": 0,
1528      "cost_per_1m_out_cached": 0,
1529      "context_window": 131072,
1530      "default_max_tokens": 8000,
1531      "can_reason": true,
1532      "reasoning_levels": [
1533        "low",
1534        "medium",
1535        "high"
1536      ],
1537      "default_reasoning_effort": "medium",
1538      "supports_attachments": false,
1539      "options": {}
1540    },
1541    {
1542      "id": "kwaipilot/kat-coder-pro-v2",
1543      "name": "Kat Coder Pro V2",
1544      "cost_per_1m_in": 0.3,
1545      "cost_per_1m_out": 1.2,
1546      "cost_per_1m_in_cached": 0.06,
1547      "cost_per_1m_out_cached": 0,
1548      "context_window": 256000,
1549      "default_max_tokens": 8000,
1550      "can_reason": true,
1551      "reasoning_levels": [
1552        "low",
1553        "medium",
1554        "high"
1555      ],
1556      "default_reasoning_effort": "medium",
1557      "supports_attachments": false,
1558      "options": {}
1559    },
1560    {
1561      "id": "moonshotai/kimi-k2",
1562      "name": "Kimi K2",
1563      "cost_per_1m_in": 0.6,
1564      "cost_per_1m_out": 2.5,
1565      "cost_per_1m_in_cached": 0.15,
1566      "cost_per_1m_out_cached": 0,
1567      "context_window": 131072,
1568      "default_max_tokens": 8000,
1569      "can_reason": false,
1570      "supports_attachments": false,
1571      "options": {}
1572    },
1573    {
1574      "id": "moonshotai/kimi-k2-0905",
1575      "name": "Kimi K2 0905",
1576      "cost_per_1m_in": 0.6,
1577      "cost_per_1m_out": 2.5,
1578      "cost_per_1m_in_cached": 0.15,
1579      "cost_per_1m_out_cached": 0,
1580      "context_window": 256000,
1581      "default_max_tokens": 8000,
1582      "can_reason": false,
1583      "supports_attachments": false,
1584      "options": {}
1585    },
1586    {
1587      "id": "moonshotai/kimi-k2-thinking",
1588      "name": "Kimi K2 Thinking",
1589      "cost_per_1m_in": 0.6,
1590      "cost_per_1m_out": 2.5,
1591      "cost_per_1m_in_cached": 0.15,
1592      "cost_per_1m_out_cached": 0,
1593      "context_window": 262144,
1594      "default_max_tokens": 8000,
1595      "can_reason": true,
1596      "reasoning_levels": [
1597        "low",
1598        "medium",
1599        "high"
1600      ],
1601      "default_reasoning_effort": "medium",
1602      "supports_attachments": false,
1603      "options": {}
1604    },
1605    {
1606      "id": "moonshotai/kimi-k2-thinking-turbo",
1607      "name": "Kimi K2 Thinking Turbo",
1608      "cost_per_1m_in": 1.15,
1609      "cost_per_1m_out": 8,
1610      "cost_per_1m_in_cached": 0.15,
1611      "cost_per_1m_out_cached": 0,
1612      "context_window": 262144,
1613      "default_max_tokens": 8000,
1614      "can_reason": true,
1615      "reasoning_levels": [
1616        "low",
1617        "medium",
1618        "high"
1619      ],
1620      "default_reasoning_effort": "medium",
1621      "supports_attachments": false,
1622      "options": {}
1623    },
1624    {
1625      "id": "moonshotai/kimi-k2-turbo",
1626      "name": "Kimi K2 Turbo",
1627      "cost_per_1m_in": 1.15,
1628      "cost_per_1m_out": 8,
1629      "cost_per_1m_in_cached": 0.15,
1630      "cost_per_1m_out_cached": 0,
1631      "context_window": 256000,
1632      "default_max_tokens": 8000,
1633      "can_reason": false,
1634      "supports_attachments": false,
1635      "options": {}
1636    },
1637    {
1638      "id": "moonshotai/kimi-k2.5",
1639      "name": "Kimi K2.5",
1640      "cost_per_1m_in": 0.6,
1641      "cost_per_1m_out": 3,
1642      "cost_per_1m_in_cached": 0.1,
1643      "cost_per_1m_out_cached": 0,
1644      "context_window": 262144,
1645      "default_max_tokens": 8000,
1646      "can_reason": true,
1647      "reasoning_levels": [
1648        "low",
1649        "medium",
1650        "high"
1651      ],
1652      "default_reasoning_effort": "medium",
1653      "supports_attachments": true,
1654      "options": {}
1655    },
1656    {
1657      "id": "meta/llama-3.1-70b",
1658      "name": "Llama 3.1 70B Instruct",
1659      "cost_per_1m_in": 0.72,
1660      "cost_per_1m_out": 0.72,
1661      "cost_per_1m_in_cached": 0,
1662      "cost_per_1m_out_cached": 0,
1663      "context_window": 128000,
1664      "default_max_tokens": 8000,
1665      "can_reason": false,
1666      "supports_attachments": false,
1667      "options": {}
1668    },
1669    {
1670      "id": "meta/llama-3.1-8b",
1671      "name": "Llama 3.1 8B Instruct",
1672      "cost_per_1m_in": 0.22,
1673      "cost_per_1m_out": 0.22,
1674      "cost_per_1m_in_cached": 0,
1675      "cost_per_1m_out_cached": 0,
1676      "context_window": 128000,
1677      "default_max_tokens": 8000,
1678      "can_reason": false,
1679      "supports_attachments": false,
1680      "options": {}
1681    },
1682    {
1683      "id": "meta/llama-3.2-11b",
1684      "name": "Llama 3.2 11B Vision Instruct",
1685      "cost_per_1m_in": 0.16,
1686      "cost_per_1m_out": 0.16,
1687      "cost_per_1m_in_cached": 0,
1688      "cost_per_1m_out_cached": 0,
1689      "context_window": 128000,
1690      "default_max_tokens": 8000,
1691      "can_reason": false,
1692      "supports_attachments": true,
1693      "options": {}
1694    },
1695    {
1696      "id": "meta/llama-3.2-90b",
1697      "name": "Llama 3.2 90B Vision Instruct",
1698      "cost_per_1m_in": 0.72,
1699      "cost_per_1m_out": 0.72,
1700      "cost_per_1m_in_cached": 0,
1701      "cost_per_1m_out_cached": 0,
1702      "context_window": 128000,
1703      "default_max_tokens": 8000,
1704      "can_reason": false,
1705      "supports_attachments": true,
1706      "options": {}
1707    },
1708    {
1709      "id": "meta/llama-3.3-70b",
1710      "name": "Llama 3.3 70B Instruct",
1711      "cost_per_1m_in": 0.72,
1712      "cost_per_1m_out": 0.72,
1713      "cost_per_1m_in_cached": 0,
1714      "cost_per_1m_out_cached": 0,
1715      "context_window": 128000,
1716      "default_max_tokens": 8000,
1717      "can_reason": false,
1718      "supports_attachments": false,
1719      "options": {}
1720    },
1721    {
1722      "id": "meta/llama-4-maverick",
1723      "name": "Llama 4 Maverick 17B Instruct",
1724      "cost_per_1m_in": 0.35,
1725      "cost_per_1m_out": 1.15,
1726      "cost_per_1m_in_cached": 0,
1727      "cost_per_1m_out_cached": 0,
1728      "context_window": 524288,
1729      "default_max_tokens": 8000,
1730      "can_reason": false,
1731      "supports_attachments": true,
1732      "options": {}
1733    },
1734    {
1735      "id": "meta/llama-4-scout",
1736      "name": "Llama 4 Scout 17B Instruct",
1737      "cost_per_1m_in": 0.17,
1738      "cost_per_1m_out": 0.66,
1739      "cost_per_1m_in_cached": 0,
1740      "cost_per_1m_out_cached": 0,
1741      "context_window": 128000,
1742      "default_max_tokens": 8000,
1743      "can_reason": false,
1744      "supports_attachments": true,
1745      "options": {}
1746    },
1747    {
1748      "id": "meituan/longcat-flash-chat",
1749      "name": "LongCat Flash Chat",
1750      "cost_per_1m_in": 0,
1751      "cost_per_1m_out": 0,
1752      "cost_per_1m_in_cached": 0,
1753      "cost_per_1m_out_cached": 0,
1754      "context_window": 128000,
1755      "default_max_tokens": 8000,
1756      "can_reason": false,
1757      "supports_attachments": false,
1758      "options": {}
1759    },
1760    {
1761      "id": "meituan/longcat-flash-thinking",
1762      "name": "LongCat Flash Thinking",
1763      "cost_per_1m_in": 0.15,
1764      "cost_per_1m_out": 1.5,
1765      "cost_per_1m_in_cached": 0,
1766      "cost_per_1m_out_cached": 0,
1767      "context_window": 128000,
1768      "default_max_tokens": 8000,
1769      "can_reason": true,
1770      "reasoning_levels": [
1771        "low",
1772        "medium",
1773        "high"
1774      ],
1775      "default_reasoning_effort": "medium",
1776      "supports_attachments": false,
1777      "options": {}
1778    },
1779    {
1780      "id": "inception/mercury-2",
1781      "name": "Mercury 2",
1782      "cost_per_1m_in": 0.25,
1783      "cost_per_1m_out": 0.75,
1784      "cost_per_1m_in_cached": 0.025,
1785      "cost_per_1m_out_cached": 0,
1786      "context_window": 128000,
1787      "default_max_tokens": 8000,
1788      "can_reason": true,
1789      "reasoning_levels": [
1790        "low",
1791        "medium",
1792        "high"
1793      ],
1794      "default_reasoning_effort": "medium",
1795      "supports_attachments": false,
1796      "options": {}
1797    },
1798    {
1799      "id": "inception/mercury-coder-small",
1800      "name": "Mercury Coder Small Beta",
1801      "cost_per_1m_in": 0.25,
1802      "cost_per_1m_out": 1,
1803      "cost_per_1m_in_cached": 0,
1804      "cost_per_1m_out_cached": 0,
1805      "context_window": 32000,
1806      "default_max_tokens": 8000,
1807      "can_reason": false,
1808      "supports_attachments": false,
1809      "options": {}
1810    },
1811    {
1812      "id": "xiaomi/mimo-v2-flash",
1813      "name": "MiMo V2 Flash",
1814      "cost_per_1m_in": 0.09,
1815      "cost_per_1m_out": 0.29,
1816      "cost_per_1m_in_cached": 0.045,
1817      "cost_per_1m_out_cached": 0,
1818      "context_window": 262144,
1819      "default_max_tokens": 8000,
1820      "can_reason": true,
1821      "reasoning_levels": [
1822        "low",
1823        "medium",
1824        "high"
1825      ],
1826      "default_reasoning_effort": "medium",
1827      "supports_attachments": false,
1828      "options": {}
1829    },
1830    {
1831      "id": "xiaomi/mimo-v2-pro",
1832      "name": "MiMo V2 Pro",
1833      "cost_per_1m_in": 1,
1834      "cost_per_1m_out": 3,
1835      "cost_per_1m_in_cached": 0.2,
1836      "cost_per_1m_out_cached": 0,
1837      "context_window": 1000000,
1838      "default_max_tokens": 8000,
1839      "can_reason": true,
1840      "reasoning_levels": [
1841        "low",
1842        "medium",
1843        "high"
1844      ],
1845      "default_reasoning_effort": "medium",
1846      "supports_attachments": false,
1847      "options": {}
1848    },
1849    {
1850      "id": "minimax/minimax-m2",
1851      "name": "MiniMax M2",
1852      "cost_per_1m_in": 0.3,
1853      "cost_per_1m_out": 1.2,
1854      "cost_per_1m_in_cached": 0.03,
1855      "cost_per_1m_out_cached": 0.375,
1856      "context_window": 205000,
1857      "default_max_tokens": 8000,
1858      "can_reason": true,
1859      "reasoning_levels": [
1860        "low",
1861        "medium",
1862        "high"
1863      ],
1864      "default_reasoning_effort": "medium",
1865      "supports_attachments": false,
1866      "options": {}
1867    },
1868    {
1869      "id": "minimax/minimax-m2.1",
1870      "name": "MiniMax M2.1",
1871      "cost_per_1m_in": 0.3,
1872      "cost_per_1m_out": 1.2,
1873      "cost_per_1m_in_cached": 0.03,
1874      "cost_per_1m_out_cached": 0.375,
1875      "context_window": 204800,
1876      "default_max_tokens": 8000,
1877      "can_reason": true,
1878      "reasoning_levels": [
1879        "low",
1880        "medium",
1881        "high"
1882      ],
1883      "default_reasoning_effort": "medium",
1884      "supports_attachments": false,
1885      "options": {}
1886    },
1887    {
1888      "id": "minimax/minimax-m2.1-lightning",
1889      "name": "MiniMax M2.1 Lightning",
1890      "cost_per_1m_in": 0.3,
1891      "cost_per_1m_out": 2.4,
1892      "cost_per_1m_in_cached": 0.03,
1893      "cost_per_1m_out_cached": 0.375,
1894      "context_window": 204800,
1895      "default_max_tokens": 8000,
1896      "can_reason": true,
1897      "reasoning_levels": [
1898        "low",
1899        "medium",
1900        "high"
1901      ],
1902      "default_reasoning_effort": "medium",
1903      "supports_attachments": false,
1904      "options": {}
1905    },
1906    {
1907      "id": "minimax/minimax-m2.5",
1908      "name": "MiniMax M2.5",
1909      "cost_per_1m_in": 0.3,
1910      "cost_per_1m_out": 1.2,
1911      "cost_per_1m_in_cached": 0.03,
1912      "cost_per_1m_out_cached": 0.375,
1913      "context_window": 204800,
1914      "default_max_tokens": 8000,
1915      "can_reason": true,
1916      "reasoning_levels": [
1917        "low",
1918        "medium",
1919        "high"
1920      ],
1921      "default_reasoning_effort": "medium",
1922      "supports_attachments": false,
1923      "options": {}
1924    },
1925    {
1926      "id": "minimax/minimax-m2.5-highspeed",
1927      "name": "MiniMax M2.5 High Speed",
1928      "cost_per_1m_in": 0.6,
1929      "cost_per_1m_out": 2.4,
1930      "cost_per_1m_in_cached": 0.03,
1931      "cost_per_1m_out_cached": 0.375,
1932      "context_window": 204800,
1933      "default_max_tokens": 8000,
1934      "can_reason": true,
1935      "reasoning_levels": [
1936        "low",
1937        "medium",
1938        "high"
1939      ],
1940      "default_reasoning_effort": "medium",
1941      "supports_attachments": false,
1942      "options": {}
1943    },
1944    {
1945      "id": "minimax/minimax-m2.7-highspeed",
1946      "name": "MiniMax M2.7 High Speed",
1947      "cost_per_1m_in": 0.6,
1948      "cost_per_1m_out": 2.4,
1949      "cost_per_1m_in_cached": 0.06,
1950      "cost_per_1m_out_cached": 0.375,
1951      "context_window": 204800,
1952      "default_max_tokens": 8000,
1953      "can_reason": true,
1954      "reasoning_levels": [
1955        "low",
1956        "medium",
1957        "high"
1958      ],
1959      "default_reasoning_effort": "medium",
1960      "supports_attachments": true,
1961      "options": {}
1962    },
1963    {
1964      "id": "minimax/minimax-m2.7",
1965      "name": "MiniMax M2.7",
1966      "cost_per_1m_in": 0.3,
1967      "cost_per_1m_out": 1.2,
1968      "cost_per_1m_in_cached": 0.06,
1969      "cost_per_1m_out_cached": 0.375,
1970      "context_window": 204800,
1971      "default_max_tokens": 8000,
1972      "can_reason": true,
1973      "reasoning_levels": [
1974        "low",
1975        "medium",
1976        "high"
1977      ],
1978      "default_reasoning_effort": "medium",
1979      "supports_attachments": true,
1980      "options": {}
1981    },
1982    {
1983      "id": "mistral/ministral-3b",
1984      "name": "Ministral 3B",
1985      "cost_per_1m_in": 0.1,
1986      "cost_per_1m_out": 0.1,
1987      "cost_per_1m_in_cached": 0,
1988      "cost_per_1m_out_cached": 0,
1989      "context_window": 128000,
1990      "default_max_tokens": 4000,
1991      "can_reason": false,
1992      "supports_attachments": false,
1993      "options": {}
1994    },
1995    {
1996      "id": "mistral/ministral-8b",
1997      "name": "Ministral 8B",
1998      "cost_per_1m_in": 0.15,
1999      "cost_per_1m_out": 0.15,
2000      "cost_per_1m_in_cached": 0,
2001      "cost_per_1m_out_cached": 0,
2002      "context_window": 128000,
2003      "default_max_tokens": 4000,
2004      "can_reason": false,
2005      "supports_attachments": false,
2006      "options": {}
2007    },
2008    {
2009      "id": "mistral/codestral",
2010      "name": "Mistral Codestral",
2011      "cost_per_1m_in": 0.3,
2012      "cost_per_1m_out": 0.9,
2013      "cost_per_1m_in_cached": 0,
2014      "cost_per_1m_out_cached": 0,
2015      "context_window": 128000,
2016      "default_max_tokens": 4000,
2017      "can_reason": false,
2018      "supports_attachments": false,
2019      "options": {}
2020    },
2021    {
2022      "id": "mistral/mistral-medium",
2023      "name": "Mistral Medium 3.1",
2024      "cost_per_1m_in": 0.4,
2025      "cost_per_1m_out": 2,
2026      "cost_per_1m_in_cached": 0,
2027      "cost_per_1m_out_cached": 0,
2028      "context_window": 128000,
2029      "default_max_tokens": 8000,
2030      "can_reason": false,
2031      "supports_attachments": true,
2032      "options": {}
2033    },
2034    {
2035      "id": "mistral/mistral-small",
2036      "name": "Mistral Small",
2037      "cost_per_1m_in": 0.1,
2038      "cost_per_1m_out": 0.3,
2039      "cost_per_1m_in_cached": 0,
2040      "cost_per_1m_out_cached": 0,
2041      "context_window": 32000,
2042      "default_max_tokens": 4000,
2043      "can_reason": false,
2044      "supports_attachments": true,
2045      "options": {}
2046    },
2047    {
2048      "id": "nvidia/nemotron-nano-12b-v2-vl",
2049      "name": "Nvidia Nemotron Nano 12B V2 VL",
2050      "cost_per_1m_in": 0.2,
2051      "cost_per_1m_out": 0.6,
2052      "cost_per_1m_in_cached": 0,
2053      "cost_per_1m_out_cached": 0,
2054      "context_window": 131072,
2055      "default_max_tokens": 8000,
2056      "can_reason": true,
2057      "reasoning_levels": [
2058        "low",
2059        "medium",
2060        "high"
2061      ],
2062      "default_reasoning_effort": "medium",
2063      "supports_attachments": true,
2064      "options": {}
2065    },
2066    {
2067      "id": "nvidia/nemotron-nano-9b-v2",
2068      "name": "Nvidia Nemotron Nano 9B V2",
2069      "cost_per_1m_in": 0.06,
2070      "cost_per_1m_out": 0.23,
2071      "cost_per_1m_in_cached": 0,
2072      "cost_per_1m_out_cached": 0,
2073      "context_window": 131072,
2074      "default_max_tokens": 8000,
2075      "can_reason": true,
2076      "reasoning_levels": [
2077        "low",
2078        "medium",
2079        "high"
2080      ],
2081      "default_reasoning_effort": "medium",
2082      "supports_attachments": false,
2083      "options": {}
2084    },
2085    {
2086      "id": "mistral/pixtral-12b",
2087      "name": "Pixtral 12B 2409",
2088      "cost_per_1m_in": 0.15,
2089      "cost_per_1m_out": 0.15,
2090      "cost_per_1m_in_cached": 0,
2091      "cost_per_1m_out_cached": 0,
2092      "context_window": 128000,
2093      "default_max_tokens": 4000,
2094      "can_reason": false,
2095      "supports_attachments": true,
2096      "options": {}
2097    },
2098    {
2099      "id": "mistral/pixtral-large",
2100      "name": "Pixtral Large",
2101      "cost_per_1m_in": 2,
2102      "cost_per_1m_out": 6,
2103      "cost_per_1m_in_cached": 0,
2104      "cost_per_1m_out_cached": 0,
2105      "context_window": 128000,
2106      "default_max_tokens": 4000,
2107      "can_reason": false,
2108      "supports_attachments": true,
2109      "options": {}
2110    },
2111    {
2112      "id": "alibaba/qwen-3-32b",
2113      "name": "Qwen 3 32B",
2114      "cost_per_1m_in": 0.16,
2115      "cost_per_1m_out": 0.64,
2116      "cost_per_1m_in_cached": 0,
2117      "cost_per_1m_out_cached": 0,
2118      "context_window": 128000,
2119      "default_max_tokens": 8000,
2120      "can_reason": true,
2121      "reasoning_levels": [
2122        "low",
2123        "medium",
2124        "high"
2125      ],
2126      "default_reasoning_effort": "medium",
2127      "supports_attachments": false,
2128      "options": {}
2129    },
2130    {
2131      "id": "alibaba/qwen3-coder-30b-a3b",
2132      "name": "Qwen 3 Coder 30B A3B Instruct",
2133      "cost_per_1m_in": 0.15,
2134      "cost_per_1m_out": 0.6,
2135      "cost_per_1m_in_cached": 0,
2136      "cost_per_1m_out_cached": 0,
2137      "context_window": 262144,
2138      "default_max_tokens": 8000,
2139      "can_reason": true,
2140      "reasoning_levels": [
2141        "low",
2142        "medium",
2143        "high"
2144      ],
2145      "default_reasoning_effort": "medium",
2146      "supports_attachments": false,
2147      "options": {}
2148    },
2149    {
2150      "id": "alibaba/qwen3-max-thinking",
2151      "name": "Qwen 3 Max Thinking",
2152      "cost_per_1m_in": 1.2,
2153      "cost_per_1m_out": 6,
2154      "cost_per_1m_in_cached": 0.24,
2155      "cost_per_1m_out_cached": 0,
2156      "context_window": 256000,
2157      "default_max_tokens": 8000,
2158      "can_reason": true,
2159      "reasoning_levels": [
2160        "low",
2161        "medium",
2162        "high"
2163      ],
2164      "default_reasoning_effort": "medium",
2165      "supports_attachments": false,
2166      "options": {}
2167    },
2168    {
2169      "id": "alibaba/qwen3.5-flash",
2170      "name": "Qwen 3.5 Flash",
2171      "cost_per_1m_in": 0.1,
2172      "cost_per_1m_out": 0.4,
2173      "cost_per_1m_in_cached": 0.001,
2174      "cost_per_1m_out_cached": 0.125,
2175      "context_window": 1000000,
2176      "default_max_tokens": 8000,
2177      "can_reason": true,
2178      "reasoning_levels": [
2179        "low",
2180        "medium",
2181        "high"
2182      ],
2183      "default_reasoning_effort": "medium",
2184      "supports_attachments": true,
2185      "options": {}
2186    },
2187    {
2188      "id": "alibaba/qwen3.5-plus",
2189      "name": "Qwen 3.5 Plus",
2190      "cost_per_1m_in": 0.4,
2191      "cost_per_1m_out": 2.4,
2192      "cost_per_1m_in_cached": 0.04,
2193      "cost_per_1m_out_cached": 0.5,
2194      "context_window": 1000000,
2195      "default_max_tokens": 8000,
2196      "can_reason": true,
2197      "reasoning_levels": [
2198        "low",
2199        "medium",
2200        "high"
2201      ],
2202      "default_reasoning_effort": "medium",
2203      "supports_attachments": true,
2204      "options": {}
2205    },
2206    {
2207      "id": "alibaba/qwen3-235b-a22b-thinking",
2208      "name": "Qwen3 235B A22B Thinking 2507",
2209      "cost_per_1m_in": 0.23,
2210      "cost_per_1m_out": 2.3,
2211      "cost_per_1m_in_cached": 0.2,
2212      "cost_per_1m_out_cached": 0,
2213      "context_window": 262114,
2214      "default_max_tokens": 8000,
2215      "can_reason": true,
2216      "reasoning_levels": [
2217        "low",
2218        "medium",
2219        "high"
2220      ],
2221      "default_reasoning_effort": "medium",
2222      "supports_attachments": true,
2223      "options": {}
2224    },
2225    {
2226      "id": "alibaba/qwen3-coder",
2227      "name": "Qwen3 Coder 480B A35B Instruct",
2228      "cost_per_1m_in": 1.5,
2229      "cost_per_1m_out": 7.5,
2230      "cost_per_1m_in_cached": 0.3,
2231      "cost_per_1m_out_cached": 0,
2232      "context_window": 262144,
2233      "default_max_tokens": 8000,
2234      "can_reason": false,
2235      "supports_attachments": false,
2236      "options": {}
2237    },
2238    {
2239      "id": "alibaba/qwen3-coder-next",
2240      "name": "Qwen3 Coder Next",
2241      "cost_per_1m_in": 0.5,
2242      "cost_per_1m_out": 1.2,
2243      "cost_per_1m_in_cached": 0,
2244      "cost_per_1m_out_cached": 0,
2245      "context_window": 256000,
2246      "default_max_tokens": 8000,
2247      "can_reason": false,
2248      "supports_attachments": false,
2249      "options": {}
2250    },
2251    {
2252      "id": "alibaba/qwen3-coder-plus",
2253      "name": "Qwen3 Coder Plus",
2254      "cost_per_1m_in": 1,
2255      "cost_per_1m_out": 5,
2256      "cost_per_1m_in_cached": 0.2,
2257      "cost_per_1m_out_cached": 0,
2258      "context_window": 1000000,
2259      "default_max_tokens": 8000,
2260      "can_reason": false,
2261      "supports_attachments": false,
2262      "options": {}
2263    },
2264    {
2265      "id": "alibaba/qwen3-max",
2266      "name": "Qwen3 Max",
2267      "cost_per_1m_in": 1.2,
2268      "cost_per_1m_out": 6,
2269      "cost_per_1m_in_cached": 0.24,
2270      "cost_per_1m_out_cached": 0,
2271      "context_window": 262144,
2272      "default_max_tokens": 8000,
2273      "can_reason": false,
2274      "supports_attachments": false,
2275      "options": {}
2276    },
2277    {
2278      "id": "alibaba/qwen3-max-preview",
2279      "name": "Qwen3 Max Preview",
2280      "cost_per_1m_in": 1.2,
2281      "cost_per_1m_out": 6,
2282      "cost_per_1m_in_cached": 0.24,
2283      "cost_per_1m_out_cached": 0,
2284      "context_window": 262144,
2285      "default_max_tokens": 8000,
2286      "can_reason": false,
2287      "supports_attachments": false,
2288      "options": {}
2289    },
2290    {
2291      "id": "alibaba/qwen3-vl-thinking",
2292      "name": "Qwen3 VL 235B A22B Thinking",
2293      "cost_per_1m_in": 0.22,
2294      "cost_per_1m_out": 0.88,
2295      "cost_per_1m_in_cached": 0,
2296      "cost_per_1m_out_cached": 0,
2297      "context_window": 256000,
2298      "default_max_tokens": 8000,
2299      "can_reason": true,
2300      "reasoning_levels": [
2301        "low",
2302        "medium",
2303        "high"
2304      ],
2305      "default_reasoning_effort": "medium",
2306      "supports_attachments": true,
2307      "options": {}
2308    },
2309    {
2310      "id": "alibaba/qwen-3-14b",
2311      "name": "Qwen3-14B",
2312      "cost_per_1m_in": 0.12,
2313      "cost_per_1m_out": 0.24,
2314      "cost_per_1m_in_cached": 0,
2315      "cost_per_1m_out_cached": 0,
2316      "context_window": 40960,
2317      "default_max_tokens": 8000,
2318      "can_reason": true,
2319      "reasoning_levels": [
2320        "low",
2321        "medium",
2322        "high"
2323      ],
2324      "default_reasoning_effort": "medium",
2325      "supports_attachments": false,
2326      "options": {}
2327    },
2328    {
2329      "id": "alibaba/qwen-3-235b",
2330      "name": "Qwen3-235B-A22B",
2331      "cost_per_1m_in": 0.22,
2332      "cost_per_1m_out": 0.88,
2333      "cost_per_1m_in_cached": 0.11,
2334      "cost_per_1m_out_cached": 0,
2335      "context_window": 32768,
2336      "default_max_tokens": 8000,
2337      "can_reason": false,
2338      "supports_attachments": false,
2339      "options": {}
2340    },
2341    {
2342      "id": "alibaba/qwen-3-30b",
2343      "name": "Qwen3-30B-A3B",
2344      "cost_per_1m_in": 0.08,
2345      "cost_per_1m_out": 0.29,
2346      "cost_per_1m_in_cached": 0,
2347      "cost_per_1m_out_cached": 0,
2348      "context_window": 40960,
2349      "default_max_tokens": 8000,
2350      "can_reason": true,
2351      "reasoning_levels": [
2352        "low",
2353        "medium",
2354        "high"
2355      ],
2356      "default_reasoning_effort": "medium",
2357      "supports_attachments": false,
2358      "options": {}
2359    },
2360    {
2361      "id": "bytedance/seed-1.6",
2362      "name": "Seed 1.6",
2363      "cost_per_1m_in": 0.25,
2364      "cost_per_1m_out": 2,
2365      "cost_per_1m_in_cached": 0.05,
2366      "cost_per_1m_out_cached": 0,
2367      "context_window": 256000,
2368      "default_max_tokens": 8000,
2369      "can_reason": true,
2370      "reasoning_levels": [
2371        "low",
2372        "medium",
2373        "high"
2374      ],
2375      "default_reasoning_effort": "medium",
2376      "supports_attachments": false,
2377      "options": {}
2378    },
2379    {
2380      "id": "perplexity/sonar",
2381      "name": "Sonar",
2382      "cost_per_1m_in": 0,
2383      "cost_per_1m_out": 0,
2384      "cost_per_1m_in_cached": 0,
2385      "cost_per_1m_out_cached": 0,
2386      "context_window": 127000,
2387      "default_max_tokens": 8000,
2388      "can_reason": false,
2389      "supports_attachments": true,
2390      "options": {}
2391    },
2392    {
2393      "id": "perplexity/sonar-pro",
2394      "name": "Sonar Pro",
2395      "cost_per_1m_in": 0,
2396      "cost_per_1m_out": 0,
2397      "cost_per_1m_in_cached": 0,
2398      "cost_per_1m_out_cached": 0,
2399      "context_window": 200000,
2400      "default_max_tokens": 8000,
2401      "can_reason": false,
2402      "supports_attachments": true,
2403      "options": {}
2404    },
2405    {
2406      "id": "arcee-ai/trinity-large-preview",
2407      "name": "Trinity Large Preview",
2408      "cost_per_1m_in": 0.25,
2409      "cost_per_1m_out": 1,
2410      "cost_per_1m_in_cached": 0,
2411      "cost_per_1m_out_cached": 0,
2412      "context_window": 131000,
2413      "default_max_tokens": 8000,
2414      "can_reason": false,
2415      "supports_attachments": false,
2416      "options": {}
2417    },
2418    {
2419      "id": "openai/gpt-oss-120b",
2420      "name": "gpt-oss-120b",
2421      "cost_per_1m_in": 0.15,
2422      "cost_per_1m_out": 0.6,
2423      "cost_per_1m_in_cached": 0,
2424      "cost_per_1m_out_cached": 0,
2425      "context_window": 128000,
2426      "default_max_tokens": 8000,
2427      "can_reason": true,
2428      "reasoning_levels": [
2429        "low",
2430        "medium",
2431        "high"
2432      ],
2433      "default_reasoning_effort": "medium",
2434      "supports_attachments": false,
2435      "options": {}
2436    },
2437    {
2438      "id": "openai/gpt-oss-20b",
2439      "name": "gpt-oss-20b",
2440      "cost_per_1m_in": 0.07,
2441      "cost_per_1m_out": 0.3,
2442      "cost_per_1m_in_cached": 0,
2443      "cost_per_1m_out_cached": 0,
2444      "context_window": 128000,
2445      "default_max_tokens": 8000,
2446      "can_reason": true,
2447      "reasoning_levels": [
2448        "low",
2449        "medium",
2450        "high"
2451      ],
2452      "default_reasoning_effort": "medium",
2453      "supports_attachments": false,
2454      "options": {}
2455    },
2456    {
2457      "id": "openai/gpt-oss-safeguard-20b",
2458      "name": "gpt-oss-safeguard-20b",
2459      "cost_per_1m_in": 0.075,
2460      "cost_per_1m_out": 0.3,
2461      "cost_per_1m_in_cached": 0.037,
2462      "cost_per_1m_out_cached": 0,
2463      "context_window": 131072,
2464      "default_max_tokens": 8000,
2465      "can_reason": true,
2466      "reasoning_levels": [
2467        "low",
2468        "medium",
2469        "high"
2470      ],
2471      "default_reasoning_effort": "medium",
2472      "supports_attachments": false,
2473      "options": {}
2474    },
2475    {
2476      "id": "openai/o1",
2477      "name": "o1",
2478      "cost_per_1m_in": 15,
2479      "cost_per_1m_out": 60,
2480      "cost_per_1m_in_cached": 7.5,
2481      "cost_per_1m_out_cached": 0,
2482      "context_window": 200000,
2483      "default_max_tokens": 8000,
2484      "can_reason": true,
2485      "reasoning_levels": [
2486        "low",
2487        "medium",
2488        "high"
2489      ],
2490      "default_reasoning_effort": "medium",
2491      "supports_attachments": true,
2492      "options": {}
2493    },
2494    {
2495      "id": "openai/o3",
2496      "name": "o3",
2497      "cost_per_1m_in": 2,
2498      "cost_per_1m_out": 8,
2499      "cost_per_1m_in_cached": 0.5,
2500      "cost_per_1m_out_cached": 0,
2501      "context_window": 200000,
2502      "default_max_tokens": 8000,
2503      "can_reason": true,
2504      "reasoning_levels": [
2505        "low",
2506        "medium",
2507        "high"
2508      ],
2509      "default_reasoning_effort": "medium",
2510      "supports_attachments": true,
2511      "options": {}
2512    },
2513    {
2514      "id": "openai/o3-pro",
2515      "name": "o3 Pro",
2516      "cost_per_1m_in": 20,
2517      "cost_per_1m_out": 80,
2518      "cost_per_1m_in_cached": 0,
2519      "cost_per_1m_out_cached": 0,
2520      "context_window": 200000,
2521      "default_max_tokens": 8000,
2522      "can_reason": true,
2523      "reasoning_levels": [
2524        "low",
2525        "medium",
2526        "high"
2527      ],
2528      "default_reasoning_effort": "medium",
2529      "supports_attachments": true,
2530      "options": {}
2531    },
2532    {
2533      "id": "openai/o3-deep-research",
2534      "name": "o3-deep-research",
2535      "cost_per_1m_in": 10,
2536      "cost_per_1m_out": 40,
2537      "cost_per_1m_in_cached": 2.5,
2538      "cost_per_1m_out_cached": 0,
2539      "context_window": 200000,
2540      "default_max_tokens": 8000,
2541      "can_reason": true,
2542      "reasoning_levels": [
2543        "low",
2544        "medium",
2545        "high"
2546      ],
2547      "default_reasoning_effort": "medium",
2548      "supports_attachments": true,
2549      "options": {}
2550    },
2551    {
2552      "id": "openai/o3-mini",
2553      "name": "o3-mini",
2554      "cost_per_1m_in": 1.1,
2555      "cost_per_1m_out": 4.4,
2556      "cost_per_1m_in_cached": 0.55,
2557      "cost_per_1m_out_cached": 0,
2558      "context_window": 200000,
2559      "default_max_tokens": 8000,
2560      "can_reason": true,
2561      "reasoning_levels": [
2562        "low",
2563        "medium",
2564        "high"
2565      ],
2566      "default_reasoning_effort": "medium",
2567      "supports_attachments": false,
2568      "options": {}
2569    },
2570    {
2571      "id": "openai/o4-mini",
2572      "name": "o4-mini",
2573      "cost_per_1m_in": 1.1,
2574      "cost_per_1m_out": 4.4,
2575      "cost_per_1m_in_cached": 0.275,
2576      "cost_per_1m_out_cached": 0,
2577      "context_window": 200000,
2578      "default_max_tokens": 8000,
2579      "can_reason": true,
2580      "reasoning_levels": [
2581        "low",
2582        "medium",
2583        "high"
2584      ],
2585      "default_reasoning_effort": "medium",
2586      "supports_attachments": true,
2587      "options": {}
2588    }
2589  ],
2590  "default_headers": {
2591    "HTTP-Referer": "https://charm.land",
2592    "X-Title": "Crush"
2593  }
2594}