vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "vercel",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true,
  21      "options": {}
  22    },
  23    {
  24      "id": "anthropic/claude-3.5-haiku",
  25      "name": "Claude 3.5 Haiku",
  26      "cost_per_1m_in": 0.8,
  27      "cost_per_1m_out": 4,
  28      "cost_per_1m_in_cached": 0.08,
  29      "cost_per_1m_out_cached": 1,
  30      "context_window": 200000,
  31      "default_max_tokens": 8000,
  32      "can_reason": false,
  33      "supports_attachments": true,
  34      "options": {}
  35    },
  36    {
  37      "id": "anthropic/claude-3.5-sonnet",
  38      "name": "Claude 3.5 Sonnet",
  39      "cost_per_1m_in": 3,
  40      "cost_per_1m_out": 15,
  41      "cost_per_1m_in_cached": 0.3,
  42      "cost_per_1m_out_cached": 3.75,
  43      "context_window": 200000,
  44      "default_max_tokens": 8000,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "anthropic/claude-3.5-sonnet-20240620",
  51      "name": "Claude 3.5 Sonnet (2024-06-20)",
  52      "cost_per_1m_in": 3,
  53      "cost_per_1m_out": 15,
  54      "cost_per_1m_in_cached": 0.3,
  55      "cost_per_1m_out_cached": 3.75,
  56      "context_window": 200000,
  57      "default_max_tokens": 8000,
  58      "can_reason": false,
  59      "supports_attachments": true,
  60      "options": {}
  61    },
  62    {
  63      "id": "anthropic/claude-3.7-sonnet",
  64      "name": "Claude 3.7 Sonnet",
  65      "cost_per_1m_in": 3,
  66      "cost_per_1m_out": 15,
  67      "cost_per_1m_in_cached": 0.3,
  68      "cost_per_1m_out_cached": 3.75,
  69      "context_window": 200000,
  70      "default_max_tokens": 8000,
  71      "can_reason": true,
  72      "reasoning_levels": [
  73        "none",
  74        "minimal",
  75        "low",
  76        "medium",
  77        "high",
  78        "xhigh"
  79      ],
  80      "default_reasoning_effort": "medium",
  81      "supports_attachments": true,
  82      "options": {}
  83    },
  84    {
  85      "id": "anthropic/claude-haiku-4.5",
  86      "name": "Claude Haiku 4.5",
  87      "cost_per_1m_in": 1,
  88      "cost_per_1m_out": 5,
  89      "cost_per_1m_in_cached": 0.1,
  90      "cost_per_1m_out_cached": 1.25,
  91      "context_window": 200000,
  92      "default_max_tokens": 8000,
  93      "can_reason": true,
  94      "reasoning_levels": [
  95        "none",
  96        "minimal",
  97        "low",
  98        "medium",
  99        "high",
 100        "xhigh"
 101      ],
 102      "default_reasoning_effort": "medium",
 103      "supports_attachments": true,
 104      "options": {}
 105    },
 106    {
 107      "id": "anthropic/claude-opus-4",
 108      "name": "Claude Opus 4",
 109      "cost_per_1m_in": 15,
 110      "cost_per_1m_out": 75,
 111      "cost_per_1m_in_cached": 1.5,
 112      "cost_per_1m_out_cached": 18.75,
 113      "context_window": 200000,
 114      "default_max_tokens": 8000,
 115      "can_reason": true,
 116      "reasoning_levels": [
 117        "none",
 118        "minimal",
 119        "low",
 120        "medium",
 121        "high",
 122        "xhigh"
 123      ],
 124      "default_reasoning_effort": "medium",
 125      "supports_attachments": true,
 126      "options": {}
 127    },
 128    {
 129      "id": "anthropic/claude-opus-4.1",
 130      "name": "Claude Opus 4.1",
 131      "cost_per_1m_in": 15,
 132      "cost_per_1m_out": 75,
 133      "cost_per_1m_in_cached": 1.5,
 134      "cost_per_1m_out_cached": 18.75,
 135      "context_window": 200000,
 136      "default_max_tokens": 8000,
 137      "can_reason": true,
 138      "reasoning_levels": [
 139        "none",
 140        "minimal",
 141        "low",
 142        "medium",
 143        "high",
 144        "xhigh"
 145      ],
 146      "default_reasoning_effort": "medium",
 147      "supports_attachments": true,
 148      "options": {}
 149    },
 150    {
 151      "id": "anthropic/claude-opus-4.5",
 152      "name": "Claude Opus 4.5",
 153      "cost_per_1m_in": 5,
 154      "cost_per_1m_out": 25,
 155      "cost_per_1m_in_cached": 0.5,
 156      "cost_per_1m_out_cached": 6.25,
 157      "context_window": 200000,
 158      "default_max_tokens": 8000,
 159      "can_reason": true,
 160      "reasoning_levels": [
 161        "none",
 162        "minimal",
 163        "low",
 164        "medium",
 165        "high",
 166        "xhigh"
 167      ],
 168      "default_reasoning_effort": "medium",
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-opus-4.6",
 174      "name": "Claude Opus 4.6",
 175      "cost_per_1m_in": 5,
 176      "cost_per_1m_out": 25,
 177      "cost_per_1m_in_cached": 0.5,
 178      "cost_per_1m_out_cached": 6.25,
 179      "context_window": 1000000,
 180      "default_max_tokens": 8000,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "none",
 184        "minimal",
 185        "low",
 186        "medium",
 187        "high",
 188        "xhigh"
 189      ],
 190      "default_reasoning_effort": "medium",
 191      "supports_attachments": true,
 192      "options": {}
 193    },
 194    {
 195      "id": "anthropic/claude-sonnet-4",
 196      "name": "Claude Sonnet 4",
 197      "cost_per_1m_in": 3,
 198      "cost_per_1m_out": 15,
 199      "cost_per_1m_in_cached": 0.3,
 200      "cost_per_1m_out_cached": 3.75,
 201      "context_window": 1000000,
 202      "default_max_tokens": 8000,
 203      "can_reason": true,
 204      "reasoning_levels": [
 205        "none",
 206        "minimal",
 207        "low",
 208        "medium",
 209        "high",
 210        "xhigh"
 211      ],
 212      "default_reasoning_effort": "medium",
 213      "supports_attachments": true,
 214      "options": {}
 215    },
 216    {
 217      "id": "anthropic/claude-sonnet-4.5",
 218      "name": "Claude Sonnet 4.5",
 219      "cost_per_1m_in": 3,
 220      "cost_per_1m_out": 15,
 221      "cost_per_1m_in_cached": 0.3,
 222      "cost_per_1m_out_cached": 3.75,
 223      "context_window": 1000000,
 224      "default_max_tokens": 8000,
 225      "can_reason": true,
 226      "reasoning_levels": [
 227        "none",
 228        "minimal",
 229        "low",
 230        "medium",
 231        "high",
 232        "xhigh"
 233      ],
 234      "default_reasoning_effort": "medium",
 235      "supports_attachments": true,
 236      "options": {}
 237    },
 238    {
 239      "id": "anthropic/claude-sonnet-4.6",
 240      "name": "Claude Sonnet 4.6",
 241      "cost_per_1m_in": 3,
 242      "cost_per_1m_out": 15,
 243      "cost_per_1m_in_cached": 0.3,
 244      "cost_per_1m_out_cached": 3.75,
 245      "context_window": 1000000,
 246      "default_max_tokens": 8000,
 247      "can_reason": true,
 248      "reasoning_levels": [
 249        "none",
 250        "minimal",
 251        "low",
 252        "medium",
 253        "high",
 254        "xhigh"
 255      ],
 256      "default_reasoning_effort": "medium",
 257      "supports_attachments": true,
 258      "options": {}
 259    },
 260    {
 261      "id": "cohere/command-a",
 262      "name": "Command A",
 263      "cost_per_1m_in": 2.5,
 264      "cost_per_1m_out": 10,
 265      "cost_per_1m_in_cached": 0,
 266      "cost_per_1m_out_cached": 0,
 267      "context_window": 256000,
 268      "default_max_tokens": 8000,
 269      "can_reason": false,
 270      "supports_attachments": false,
 271      "options": {}
 272    },
 273    {
 274      "id": "deepseek/deepseek-v3",
 275      "name": "DeepSeek V3 0324",
 276      "cost_per_1m_in": 0.77,
 277      "cost_per_1m_out": 0.77,
 278      "cost_per_1m_in_cached": 0,
 279      "cost_per_1m_out_cached": 0,
 280      "context_window": 163840,
 281      "default_max_tokens": 8000,
 282      "can_reason": false,
 283      "supports_attachments": false,
 284      "options": {}
 285    },
 286    {
 287      "id": "deepseek/deepseek-v3.1-terminus",
 288      "name": "DeepSeek V3.1 Terminus",
 289      "cost_per_1m_in": 0.27,
 290      "cost_per_1m_out": 1,
 291      "cost_per_1m_in_cached": 0,
 292      "cost_per_1m_out_cached": 0,
 293      "context_window": 131072,
 294      "default_max_tokens": 8000,
 295      "can_reason": true,
 296      "reasoning_levels": [
 297        "low",
 298        "medium",
 299        "high"
 300      ],
 301      "default_reasoning_effort": "medium",
 302      "supports_attachments": false,
 303      "options": {}
 304    },
 305    {
 306      "id": "deepseek/deepseek-v3.2",
 307      "name": "DeepSeek V3.2",
 308      "cost_per_1m_in": 0.28,
 309      "cost_per_1m_out": 0.42,
 310      "cost_per_1m_in_cached": 0.028,
 311      "cost_per_1m_out_cached": 0,
 312      "context_window": 128000,
 313      "default_max_tokens": 8000,
 314      "can_reason": false,
 315      "supports_attachments": false,
 316      "options": {}
 317    },
 318    {
 319      "id": "deepseek/deepseek-v3.2-thinking",
 320      "name": "DeepSeek V3.2 Thinking",
 321      "cost_per_1m_in": 0.28,
 322      "cost_per_1m_out": 0.42,
 323      "cost_per_1m_in_cached": 0.028,
 324      "cost_per_1m_out_cached": 0,
 325      "context_window": 128000,
 326      "default_max_tokens": 8000,
 327      "can_reason": true,
 328      "reasoning_levels": [
 329        "low",
 330        "medium",
 331        "high"
 332      ],
 333      "default_reasoning_effort": "medium",
 334      "supports_attachments": false,
 335      "options": {}
 336    },
 337    {
 338      "id": "deepseek/deepseek-r1",
 339      "name": "DeepSeek-R1",
 340      "cost_per_1m_in": 1.35,
 341      "cost_per_1m_out": 5.4,
 342      "cost_per_1m_in_cached": 0,
 343      "cost_per_1m_out_cached": 0,
 344      "context_window": 128000,
 345      "default_max_tokens": 8000,
 346      "can_reason": true,
 347      "reasoning_levels": [
 348        "low",
 349        "medium",
 350        "high"
 351      ],
 352      "default_reasoning_effort": "medium",
 353      "supports_attachments": false,
 354      "options": {}
 355    },
 356    {
 357      "id": "deepseek/deepseek-v3.1",
 358      "name": "DeepSeek-V3.1",
 359      "cost_per_1m_in": 0.5,
 360      "cost_per_1m_out": 1.5,
 361      "cost_per_1m_in_cached": 0,
 362      "cost_per_1m_out_cached": 0,
 363      "context_window": 163840,
 364      "default_max_tokens": 8000,
 365      "can_reason": true,
 366      "reasoning_levels": [
 367        "low",
 368        "medium",
 369        "high"
 370      ],
 371      "default_reasoning_effort": "medium",
 372      "supports_attachments": false,
 373      "options": {}
 374    },
 375    {
 376      "id": "mistral/devstral-2",
 377      "name": "Devstral 2",
 378      "cost_per_1m_in": 0.4,
 379      "cost_per_1m_out": 2,
 380      "cost_per_1m_in_cached": 0,
 381      "cost_per_1m_out_cached": 0,
 382      "context_window": 256000,
 383      "default_max_tokens": 8000,
 384      "can_reason": false,
 385      "supports_attachments": false,
 386      "options": {}
 387    },
 388    {
 389      "id": "mistral/devstral-small",
 390      "name": "Devstral Small 1.1",
 391      "cost_per_1m_in": 0.1,
 392      "cost_per_1m_out": 0.3,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 128000,
 396      "default_max_tokens": 8000,
 397      "can_reason": false,
 398      "supports_attachments": false,
 399      "options": {}
 400    },
 401    {
 402      "id": "mistral/devstral-small-2",
 403      "name": "Devstral Small 2",
 404      "cost_per_1m_in": 0.1,
 405      "cost_per_1m_out": 0.3,
 406      "cost_per_1m_in_cached": 0,
 407      "cost_per_1m_out_cached": 0,
 408      "context_window": 256000,
 409      "default_max_tokens": 8000,
 410      "can_reason": false,
 411      "supports_attachments": false,
 412      "options": {}
 413    },
 414    {
 415      "id": "zai/glm-4.5-air",
 416      "name": "GLM 4.5 Air",
 417      "cost_per_1m_in": 0.2,
 418      "cost_per_1m_out": 1.1,
 419      "cost_per_1m_in_cached": 0.03,
 420      "cost_per_1m_out_cached": 0,
 421      "context_window": 128000,
 422      "default_max_tokens": 8000,
 423      "can_reason": true,
 424      "reasoning_levels": [
 425        "low",
 426        "medium",
 427        "high"
 428      ],
 429      "default_reasoning_effort": "medium",
 430      "supports_attachments": false,
 431      "options": {}
 432    },
 433    {
 434      "id": "zai/glm-4.5v",
 435      "name": "GLM 4.5V",
 436      "cost_per_1m_in": 0.6,
 437      "cost_per_1m_out": 1.8,
 438      "cost_per_1m_in_cached": 0.11,
 439      "cost_per_1m_out_cached": 0,
 440      "context_window": 66000,
 441      "default_max_tokens": 8000,
 442      "can_reason": false,
 443      "supports_attachments": true,
 444      "options": {}
 445    },
 446    {
 447      "id": "zai/glm-4.6",
 448      "name": "GLM 4.6",
 449      "cost_per_1m_in": 0.45,
 450      "cost_per_1m_out": 1.8,
 451      "cost_per_1m_in_cached": 0.11,
 452      "cost_per_1m_out_cached": 0,
 453      "context_window": 200000,
 454      "default_max_tokens": 8000,
 455      "can_reason": true,
 456      "reasoning_levels": [
 457        "low",
 458        "medium",
 459        "high"
 460      ],
 461      "default_reasoning_effort": "medium",
 462      "supports_attachments": false,
 463      "options": {}
 464    },
 465    {
 466      "id": "zai/glm-4.7",
 467      "name": "GLM 4.7",
 468      "cost_per_1m_in": 0.6,
 469      "cost_per_1m_out": 2.2,
 470      "cost_per_1m_in_cached": 0,
 471      "cost_per_1m_out_cached": 0,
 472      "context_window": 200000,
 473      "default_max_tokens": 8000,
 474      "can_reason": true,
 475      "reasoning_levels": [
 476        "low",
 477        "medium",
 478        "high"
 479      ],
 480      "default_reasoning_effort": "medium",
 481      "supports_attachments": false,
 482      "options": {}
 483    },
 484    {
 485      "id": "zai/glm-4.7-flash",
 486      "name": "GLM 4.7 Flash",
 487      "cost_per_1m_in": 0.07,
 488      "cost_per_1m_out": 0.4,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 200000,
 492      "default_max_tokens": 8000,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": false,
 501      "options": {}
 502    },
 503    {
 504      "id": "zai/glm-4.7-flashx",
 505      "name": "GLM 4.7 FlashX",
 506      "cost_per_1m_in": 0.06,
 507      "cost_per_1m_out": 0.4,
 508      "cost_per_1m_in_cached": 0.01,
 509      "cost_per_1m_out_cached": 0,
 510      "context_window": 200000,
 511      "default_max_tokens": 8000,
 512      "can_reason": true,
 513      "reasoning_levels": [
 514        "low",
 515        "medium",
 516        "high"
 517      ],
 518      "default_reasoning_effort": "medium",
 519      "supports_attachments": false,
 520      "options": {}
 521    },
 522    {
 523      "id": "zai/glm-5",
 524      "name": "GLM 5",
 525      "cost_per_1m_in": 1,
 526      "cost_per_1m_out": 3.2,
 527      "cost_per_1m_in_cached": 0.2,
 528      "cost_per_1m_out_cached": 0,
 529      "context_window": 202800,
 530      "default_max_tokens": 8000,
 531      "can_reason": true,
 532      "reasoning_levels": [
 533        "low",
 534        "medium",
 535        "high"
 536      ],
 537      "default_reasoning_effort": "medium",
 538      "supports_attachments": false,
 539      "options": {}
 540    },
 541    {
 542      "id": "zai/glm-5-turbo",
 543      "name": "GLM 5 Turbo",
 544      "cost_per_1m_in": 1.2,
 545      "cost_per_1m_out": 4,
 546      "cost_per_1m_in_cached": 0.24,
 547      "cost_per_1m_out_cached": 0,
 548      "context_window": 202800,
 549      "default_max_tokens": 8000,
 550      "can_reason": true,
 551      "reasoning_levels": [
 552        "low",
 553        "medium",
 554        "high"
 555      ],
 556      "default_reasoning_effort": "medium",
 557      "supports_attachments": false,
 558      "options": {}
 559    },
 560    {
 561      "id": "zai/glm-4.5",
 562      "name": "GLM-4.5",
 563      "cost_per_1m_in": 0.6,
 564      "cost_per_1m_out": 2.2,
 565      "cost_per_1m_in_cached": 0.11,
 566      "cost_per_1m_out_cached": 0,
 567      "context_window": 128000,
 568      "default_max_tokens": 8000,
 569      "can_reason": true,
 570      "reasoning_levels": [
 571        "low",
 572        "medium",
 573        "high"
 574      ],
 575      "default_reasoning_effort": "medium",
 576      "supports_attachments": false,
 577      "options": {}
 578    },
 579    {
 580      "id": "zai/glm-4.6v",
 581      "name": "GLM-4.6V",
 582      "cost_per_1m_in": 0.3,
 583      "cost_per_1m_out": 0.9,
 584      "cost_per_1m_in_cached": 0.05,
 585      "cost_per_1m_out_cached": 0,
 586      "context_window": 128000,
 587      "default_max_tokens": 8000,
 588      "can_reason": true,
 589      "reasoning_levels": [
 590        "low",
 591        "medium",
 592        "high"
 593      ],
 594      "default_reasoning_effort": "medium",
 595      "supports_attachments": true,
 596      "options": {}
 597    },
 598    {
 599      "id": "zai/glm-4.6v-flash",
 600      "name": "GLM-4.6V-Flash",
 601      "cost_per_1m_in": 0,
 602      "cost_per_1m_out": 0,
 603      "cost_per_1m_in_cached": 0,
 604      "cost_per_1m_out_cached": 0,
 605      "context_window": 128000,
 606      "default_max_tokens": 8000,
 607      "can_reason": true,
 608      "reasoning_levels": [
 609        "low",
 610        "medium",
 611        "high"
 612      ],
 613      "default_reasoning_effort": "medium",
 614      "supports_attachments": true,
 615      "options": {}
 616    },
 617    {
 618      "id": "openai/gpt-5-chat",
 619      "name": "GPT 5 Chat",
 620      "cost_per_1m_in": 1.25,
 621      "cost_per_1m_out": 10,
 622      "cost_per_1m_in_cached": 0.125,
 623      "cost_per_1m_out_cached": 0,
 624      "context_window": 128000,
 625      "default_max_tokens": 8000,
 626      "can_reason": true,
 627      "reasoning_levels": [
 628        "low",
 629        "medium",
 630        "high"
 631      ],
 632      "default_reasoning_effort": "medium",
 633      "supports_attachments": true,
 634      "options": {}
 635    },
 636    {
 637      "id": "openai/gpt-5.1-codex-max",
 638      "name": "GPT 5.1 Codex Max",
 639      "cost_per_1m_in": 1.25,
 640      "cost_per_1m_out": 10,
 641      "cost_per_1m_in_cached": 0.125,
 642      "cost_per_1m_out_cached": 0,
 643      "context_window": 400000,
 644      "default_max_tokens": 8000,
 645      "can_reason": true,
 646      "reasoning_levels": [
 647        "low",
 648        "medium",
 649        "high"
 650      ],
 651      "default_reasoning_effort": "medium",
 652      "supports_attachments": true,
 653      "options": {}
 654    },
 655    {
 656      "id": "openai/gpt-5.1-codex-mini",
 657      "name": "GPT 5.1 Codex Mini",
 658      "cost_per_1m_in": 0.25,
 659      "cost_per_1m_out": 2,
 660      "cost_per_1m_in_cached": 0.025,
 661      "cost_per_1m_out_cached": 0,
 662      "context_window": 400000,
 663      "default_max_tokens": 8000,
 664      "can_reason": true,
 665      "reasoning_levels": [
 666        "low",
 667        "medium",
 668        "high"
 669      ],
 670      "default_reasoning_effort": "medium",
 671      "supports_attachments": true,
 672      "options": {}
 673    },
 674    {
 675      "id": "openai/gpt-5.1-thinking",
 676      "name": "GPT 5.1 Thinking",
 677      "cost_per_1m_in": 1.25,
 678      "cost_per_1m_out": 10,
 679      "cost_per_1m_in_cached": 0.125,
 680      "cost_per_1m_out_cached": 0,
 681      "context_window": 400000,
 682      "default_max_tokens": 8000,
 683      "can_reason": true,
 684      "reasoning_levels": [
 685        "low",
 686        "medium",
 687        "high"
 688      ],
 689      "default_reasoning_effort": "medium",
 690      "supports_attachments": true,
 691      "options": {}
 692    },
 693    {
 694      "id": "openai/gpt-5.2",
 695      "name": "GPT 5.2",
 696      "cost_per_1m_in": 1.75,
 697      "cost_per_1m_out": 14,
 698      "cost_per_1m_in_cached": 0.175,
 699      "cost_per_1m_out_cached": 0,
 700      "context_window": 400000,
 701      "default_max_tokens": 8000,
 702      "can_reason": true,
 703      "reasoning_levels": [
 704        "low",
 705        "medium",
 706        "high"
 707      ],
 708      "default_reasoning_effort": "medium",
 709      "supports_attachments": true,
 710      "options": {}
 711    },
 712    {
 713      "id": "openai/gpt-5.2-pro",
  714      "name": "GPT 5.2 Pro",
 715      "cost_per_1m_in": 21,
 716      "cost_per_1m_out": 168,
 717      "cost_per_1m_in_cached": 0,
 718      "cost_per_1m_out_cached": 0,
 719      "context_window": 400000,
 720      "default_max_tokens": 8000,
 721      "can_reason": true,
 722      "reasoning_levels": [
 723        "low",
 724        "medium",
 725        "high"
 726      ],
 727      "default_reasoning_effort": "medium",
 728      "supports_attachments": true,
 729      "options": {}
 730    },
 731    {
 732      "id": "openai/gpt-5.2-chat",
 733      "name": "GPT 5.2 Chat",
 734      "cost_per_1m_in": 1.75,
 735      "cost_per_1m_out": 14,
 736      "cost_per_1m_in_cached": 0.175,
 737      "cost_per_1m_out_cached": 0,
 738      "context_window": 128000,
 739      "default_max_tokens": 8000,
 740      "can_reason": true,
 741      "reasoning_levels": [
 742        "low",
 743        "medium",
 744        "high"
 745      ],
 746      "default_reasoning_effort": "medium",
 747      "supports_attachments": true,
 748      "options": {}
 749    },
 750    {
 751      "id": "openai/gpt-5.2-codex",
 752      "name": "GPT 5.2 Codex",
 753      "cost_per_1m_in": 1.75,
 754      "cost_per_1m_out": 14,
 755      "cost_per_1m_in_cached": 0.175,
 756      "cost_per_1m_out_cached": 0,
 757      "context_window": 400000,
 758      "default_max_tokens": 8000,
 759      "can_reason": true,
 760      "reasoning_levels": [
 761        "low",
 762        "medium",
 763        "high"
 764      ],
 765      "default_reasoning_effort": "medium",
 766      "supports_attachments": true,
 767      "options": {}
 768    },
 769    {
 770      "id": "openai/gpt-5.3-codex",
 771      "name": "GPT 5.3 Codex",
 772      "cost_per_1m_in": 1.75,
 773      "cost_per_1m_out": 14,
 774      "cost_per_1m_in_cached": 0.175,
 775      "cost_per_1m_out_cached": 0,
 776      "context_window": 400000,
 777      "default_max_tokens": 8000,
 778      "can_reason": true,
 779      "reasoning_levels": [
 780        "low",
 781        "medium",
 782        "high"
 783      ],
 784      "default_reasoning_effort": "medium",
 785      "supports_attachments": true,
 786      "options": {}
 787    },
 788    {
 789      "id": "openai/gpt-5.4",
 790      "name": "GPT 5.4",
 791      "cost_per_1m_in": 2.5,
 792      "cost_per_1m_out": 15,
 793      "cost_per_1m_in_cached": 0.25,
 794      "cost_per_1m_out_cached": 0,
 795      "context_window": 1050000,
 796      "default_max_tokens": 8000,
 797      "can_reason": true,
 798      "reasoning_levels": [
 799        "low",
 800        "medium",
 801        "high"
 802      ],
 803      "default_reasoning_effort": "medium",
 804      "supports_attachments": true,
 805      "options": {}
 806    },
 807    {
 808      "id": "openai/gpt-5.4-pro",
 809      "name": "GPT 5.4 Pro",
 810      "cost_per_1m_in": 30,
 811      "cost_per_1m_out": 180,
 812      "cost_per_1m_in_cached": 0,
 813      "cost_per_1m_out_cached": 0,
 814      "context_window": 1050000,
 815      "default_max_tokens": 8000,
 816      "can_reason": true,
 817      "reasoning_levels": [
 818        "low",
 819        "medium",
 820        "high"
 821      ],
 822      "default_reasoning_effort": "medium",
 823      "supports_attachments": true,
 824      "options": {}
 825    },
 826    {
 827      "id": "openai/gpt-4-turbo",
 828      "name": "GPT-4 Turbo",
 829      "cost_per_1m_in": 10,
 830      "cost_per_1m_out": 30,
 831      "cost_per_1m_in_cached": 0,
 832      "cost_per_1m_out_cached": 0,
 833      "context_window": 128000,
 834      "default_max_tokens": 4096,
 835      "can_reason": false,
 836      "supports_attachments": true,
 837      "options": {}
 838    },
 839    {
 840      "id": "openai/gpt-4.1",
 841      "name": "GPT-4.1",
 842      "cost_per_1m_in": 2,
 843      "cost_per_1m_out": 8,
 844      "cost_per_1m_in_cached": 0.5,
 845      "cost_per_1m_out_cached": 0,
 846      "context_window": 1047576,
 847      "default_max_tokens": 8000,
 848      "can_reason": false,
 849      "supports_attachments": true,
 850      "options": {}
 851    },
 852    {
 853      "id": "openai/gpt-4.1-mini",
 854      "name": "GPT-4.1 mini",
 855      "cost_per_1m_in": 0.4,
 856      "cost_per_1m_out": 1.6,
 857      "cost_per_1m_in_cached": 0.1,
 858      "cost_per_1m_out_cached": 0,
 859      "context_window": 1047576,
 860      "default_max_tokens": 8000,
 861      "can_reason": false,
 862      "supports_attachments": true,
 863      "options": {}
 864    },
 865    {
 866      "id": "openai/gpt-4.1-nano",
 867      "name": "GPT-4.1 nano",
 868      "cost_per_1m_in": 0.1,
 869      "cost_per_1m_out": 0.4,
 870      "cost_per_1m_in_cached": 0.025,
 871      "cost_per_1m_out_cached": 0,
 872      "context_window": 1047576,
 873      "default_max_tokens": 8000,
 874      "can_reason": false,
 875      "supports_attachments": true,
 876      "options": {}
 877    },
 878    {
 879      "id": "openai/gpt-4o",
 880      "name": "GPT-4o",
 881      "cost_per_1m_in": 2.5,
 882      "cost_per_1m_out": 10,
 883      "cost_per_1m_in_cached": 1.25,
 884      "cost_per_1m_out_cached": 0,
 885      "context_window": 128000,
 886      "default_max_tokens": 8000,
 887      "can_reason": false,
 888      "supports_attachments": true,
 889      "options": {}
 890    },
 891    {
 892      "id": "openai/gpt-4o-mini",
 893      "name": "GPT-4o mini",
 894      "cost_per_1m_in": 0.15,
 895      "cost_per_1m_out": 0.6,
 896      "cost_per_1m_in_cached": 0.075,
 897      "cost_per_1m_out_cached": 0,
 898      "context_window": 128000,
 899      "default_max_tokens": 8000,
 900      "can_reason": false,
 901      "supports_attachments": true,
 902      "options": {}
 903    },
 904    {
 905      "id": "openai/gpt-5",
 906      "name": "GPT-5",
 907      "cost_per_1m_in": 1.25,
 908      "cost_per_1m_out": 10,
 909      "cost_per_1m_in_cached": 0.125,
 910      "cost_per_1m_out_cached": 0,
 911      "context_window": 400000,
 912      "default_max_tokens": 8000,
 913      "can_reason": true,
 914      "reasoning_levels": [
 915        "low",
 916        "medium",
 917        "high"
 918      ],
 919      "default_reasoning_effort": "medium",
 920      "supports_attachments": true,
 921      "options": {}
 922    },
 923    {
 924      "id": "openai/gpt-5-mini",
 925      "name": "GPT-5 mini",
 926      "cost_per_1m_in": 0.25,
 927      "cost_per_1m_out": 2,
 928      "cost_per_1m_in_cached": 0.025,
 929      "cost_per_1m_out_cached": 0,
 930      "context_window": 400000,
 931      "default_max_tokens": 8000,
 932      "can_reason": true,
 933      "reasoning_levels": [
 934        "low",
 935        "medium",
 936        "high"
 937      ],
 938      "default_reasoning_effort": "medium",
 939      "supports_attachments": true,
 940      "options": {}
 941    },
 942    {
 943      "id": "openai/gpt-5-nano",
 944      "name": "GPT-5 nano",
 945      "cost_per_1m_in": 0.05,
 946      "cost_per_1m_out": 0.4,
 947      "cost_per_1m_in_cached": 0.005,
 948      "cost_per_1m_out_cached": 0,
 949      "context_window": 400000,
 950      "default_max_tokens": 8000,
 951      "can_reason": true,
 952      "reasoning_levels": [
 953        "low",
 954        "medium",
 955        "high"
 956      ],
 957      "default_reasoning_effort": "medium",
 958      "supports_attachments": true,
 959      "options": {}
 960    },
 961    {
 962      "id": "openai/gpt-5-pro",
 963      "name": "GPT-5 pro",
 964      "cost_per_1m_in": 15,
 965      "cost_per_1m_out": 120,
 966      "cost_per_1m_in_cached": 0,
 967      "cost_per_1m_out_cached": 0,
 968      "context_window": 400000,
 969      "default_max_tokens": 8000,
 970      "can_reason": true,
 971      "reasoning_levels": [
 972        "low",
 973        "medium",
 974        "high"
 975      ],
 976      "default_reasoning_effort": "medium",
 977      "supports_attachments": true,
 978      "options": {}
 979    },
 980    {
 981      "id": "openai/gpt-5-codex",
 982      "name": "GPT-5-Codex",
 983      "cost_per_1m_in": 1.25,
 984      "cost_per_1m_out": 10,
 985      "cost_per_1m_in_cached": 0.125,
 986      "cost_per_1m_out_cached": 0,
 987      "context_window": 400000,
 988      "default_max_tokens": 8000,
 989      "can_reason": true,
 990      "reasoning_levels": [
 991        "low",
 992        "medium",
 993        "high"
 994      ],
 995      "default_reasoning_effort": "medium",
 996      "supports_attachments": false,
 997      "options": {}
 998    },
 999    {
1000      "id": "openai/gpt-5.1-instant",
1001      "name": "GPT-5.1 Instant",
1002      "cost_per_1m_in": 1.25,
1003      "cost_per_1m_out": 10,
1004      "cost_per_1m_in_cached": 0.125,
1005      "cost_per_1m_out_cached": 0,
1006      "context_window": 128000,
1007      "default_max_tokens": 8000,
1008      "can_reason": true,
1009      "reasoning_levels": [
1010        "low",
1011        "medium",
1012        "high"
1013      ],
1014      "default_reasoning_effort": "medium",
1015      "supports_attachments": true,
1016      "options": {}
1017    },
1018    {
1019      "id": "openai/gpt-5.1-codex",
1020      "name": "GPT-5.1-Codex",
1021      "cost_per_1m_in": 1.25,
1022      "cost_per_1m_out": 10,
1023      "cost_per_1m_in_cached": 0.125,
1024      "cost_per_1m_out_cached": 0,
1025      "context_window": 400000,
1026      "default_max_tokens": 8000,
1027      "can_reason": true,
1028      "reasoning_levels": [
1029        "low",
1030        "medium",
1031        "high"
1032      ],
1033      "default_reasoning_effort": "medium",
1034      "supports_attachments": true,
1035      "options": {}
1036    },
1037    {
1038      "id": "openai/gpt-5.3-chat",
1039      "name": "GPT-5.3 Chat",
1040      "cost_per_1m_in": 1.75,
1041      "cost_per_1m_out": 14,
1042      "cost_per_1m_in_cached": 0.175,
1043      "cost_per_1m_out_cached": 0,
1044      "context_window": 128000,
1045      "default_max_tokens": 8000,
1046      "can_reason": true,
1047      "reasoning_levels": [
1048        "low",
1049        "medium",
1050        "high"
1051      ],
1052      "default_reasoning_effort": "medium",
1053      "supports_attachments": true,
1054      "options": {}
1055    },
1056    {
1057      "id": "google/gemini-2.0-flash",
1058      "name": "Gemini 2.0 Flash",
1059      "cost_per_1m_in": 0.15,
1060      "cost_per_1m_out": 0.6,
1061      "cost_per_1m_in_cached": 0,
1062      "cost_per_1m_out_cached": 0,
1063      "context_window": 1048576,
1064      "default_max_tokens": 8000,
1065      "can_reason": false,
1066      "supports_attachments": true,
1067      "options": {}
1068    },
1069    {
1070      "id": "google/gemini-2.0-flash-lite",
1071      "name": "Gemini 2.0 Flash Lite",
1072      "cost_per_1m_in": 0.075,
1073      "cost_per_1m_out": 0.3,
1074      "cost_per_1m_in_cached": 0,
1075      "cost_per_1m_out_cached": 0,
1076      "context_window": 1048576,
1077      "default_max_tokens": 8000,
1078      "can_reason": false,
1079      "supports_attachments": true,
1080      "options": {}
1081    },
1082    {
1083      "id": "google/gemini-2.5-flash",
1084      "name": "Gemini 2.5 Flash",
1085      "cost_per_1m_in": 0.3,
1086      "cost_per_1m_out": 2.5,
1087      "cost_per_1m_in_cached": 0.03,
1088      "cost_per_1m_out_cached": 0,
1089      "context_window": 1000000,
1090      "default_max_tokens": 8000,
1091      "can_reason": true,
1092      "reasoning_levels": [
1093        "low",
1094        "medium",
1095        "high"
1096      ],
1097      "default_reasoning_effort": "medium",
1098      "supports_attachments": true,
1099      "options": {}
1100    },
1101    {
1102      "id": "google/gemini-2.5-flash-lite",
1103      "name": "Gemini 2.5 Flash Lite",
1104      "cost_per_1m_in": 0.1,
1105      "cost_per_1m_out": 0.4,
1106      "cost_per_1m_in_cached": 0.01,
1107      "cost_per_1m_out_cached": 0,
1108      "context_window": 1048576,
1109      "default_max_tokens": 8000,
1110      "can_reason": true,
1111      "reasoning_levels": [
1112        "low",
1113        "medium",
1114        "high"
1115      ],
1116      "default_reasoning_effort": "medium",
1117      "supports_attachments": true,
1118      "options": {}
1119    },
1120    {
1121      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
1122      "name": "Gemini 2.5 Flash Lite Preview 09-2025",
1123      "cost_per_1m_in": 0.1,
1124      "cost_per_1m_out": 0.4,
1125      "cost_per_1m_in_cached": 0.01,
1126      "cost_per_1m_out_cached": 0,
1127      "context_window": 1048576,
1128      "default_max_tokens": 8000,
1129      "can_reason": true,
1130      "reasoning_levels": [
1131        "low",
1132        "medium",
1133        "high"
1134      ],
1135      "default_reasoning_effort": "medium",
1136      "supports_attachments": true,
1137      "options": {}
1138    },
1139    {
1140      "id": "google/gemini-2.5-flash-preview-09-2025",
1141      "name": "Gemini 2.5 Flash Preview 09-2025",
1142      "cost_per_1m_in": 0.3,
1143      "cost_per_1m_out": 2.5,
1144      "cost_per_1m_in_cached": 0.03,
1145      "cost_per_1m_out_cached": 0,
1146      "context_window": 1000000,
1147      "default_max_tokens": 8000,
1148      "can_reason": true,
1149      "reasoning_levels": [
1150        "low",
1151        "medium",
1152        "high"
1153      ],
1154      "default_reasoning_effort": "medium",
1155      "supports_attachments": true,
1156      "options": {}
1157    },
1158    {
1159      "id": "google/gemini-2.5-pro",
1160      "name": "Gemini 2.5 Pro",
1161      "cost_per_1m_in": 1.25,
1162      "cost_per_1m_out": 10,
1163      "cost_per_1m_in_cached": 0.125,
1164      "cost_per_1m_out_cached": 0,
1165      "context_window": 1048576,
1166      "default_max_tokens": 8000,
1167      "can_reason": true,
1168      "reasoning_levels": [
1169        "low",
1170        "medium",
1171        "high"
1172      ],
1173      "default_reasoning_effort": "medium",
1174      "supports_attachments": true,
1175      "options": {}
1176    },
1177    {
1178      "id": "google/gemini-3-flash",
1179      "name": "Gemini 3 Flash",
1180      "cost_per_1m_in": 0.5,
1181      "cost_per_1m_out": 3,
1182      "cost_per_1m_in_cached": 0.05,
1183      "cost_per_1m_out_cached": 0,
1184      "context_window": 1000000,
1185      "default_max_tokens": 8000,
1186      "can_reason": true,
1187      "reasoning_levels": [
1188        "low",
1189        "medium",
1190        "high"
1191      ],
1192      "default_reasoning_effort": "medium",
1193      "supports_attachments": true,
1194      "options": {}
1195    },
1196    {
1197      "id": "google/gemini-3-pro-preview",
1198      "name": "Gemini 3 Pro Preview",
1199      "cost_per_1m_in": 2,
1200      "cost_per_1m_out": 12,
1201      "cost_per_1m_in_cached": 0.2,
1202      "cost_per_1m_out_cached": 0,
1203      "context_window": 1000000,
1204      "default_max_tokens": 8000,
1205      "can_reason": true,
1206      "reasoning_levels": [
1207        "low",
1208        "medium",
1209        "high"
1210      ],
1211      "default_reasoning_effort": "medium",
1212      "supports_attachments": true,
1213      "options": {}
1214    },
1215    {
1216      "id": "google/gemini-3.1-flash-lite-preview",
1217      "name": "Gemini 3.1 Flash Lite Preview",
1218      "cost_per_1m_in": 0.25,
1219      "cost_per_1m_out": 1.5,
1220      "cost_per_1m_in_cached": 0,
1221      "cost_per_1m_out_cached": 0,
1222      "context_window": 1000000,
1223      "default_max_tokens": 8000,
1224      "can_reason": true,
1225      "reasoning_levels": [
1226        "low",
1227        "medium",
1228        "high"
1229      ],
1230      "default_reasoning_effort": "medium",
1231      "supports_attachments": true,
1232      "options": {}
1233    },
1234    {
1235      "id": "google/gemini-3.1-pro-preview",
1236      "name": "Gemini 3.1 Pro Preview",
1237      "cost_per_1m_in": 2,
1238      "cost_per_1m_out": 12,
1239      "cost_per_1m_in_cached": 0.2,
1240      "cost_per_1m_out_cached": 0,
1241      "context_window": 1000000,
1242      "default_max_tokens": 8000,
1243      "can_reason": true,
1244      "reasoning_levels": [
1245        "low",
1246        "medium",
1247        "high"
1248      ],
1249      "default_reasoning_effort": "medium",
1250      "supports_attachments": true,
1251      "options": {}
1252    },
1253    {
1254      "id": "xai/grok-2-vision",
1255      "name": "Grok 2 Vision",
1256      "cost_per_1m_in": 2,
1257      "cost_per_1m_out": 10,
1258      "cost_per_1m_in_cached": 0,
1259      "cost_per_1m_out_cached": 0,
1260      "context_window": 32768,
1261      "default_max_tokens": 8000,
1262      "can_reason": false,
1263      "supports_attachments": true,
1264      "options": {}
1265    },
1266    {
1267      "id": "xai/grok-3",
1268      "name": "Grok 3 Beta",
1269      "cost_per_1m_in": 3,
1270      "cost_per_1m_out": 15,
1271      "cost_per_1m_in_cached": 0,
1272      "cost_per_1m_out_cached": 0,
1273      "context_window": 131072,
1274      "default_max_tokens": 8000,
1275      "can_reason": false,
1276      "supports_attachments": false,
1277      "options": {}
1278    },
1279    {
1280      "id": "xai/grok-3-fast",
1281      "name": "Grok 3 Fast Beta",
1282      "cost_per_1m_in": 5,
1283      "cost_per_1m_out": 25,
1284      "cost_per_1m_in_cached": 0,
1285      "cost_per_1m_out_cached": 0,
1286      "context_window": 131072,
1287      "default_max_tokens": 8000,
1288      "can_reason": false,
1289      "supports_attachments": false,
1290      "options": {}
1291    },
1292    {
1293      "id": "xai/grok-3-mini",
1294      "name": "Grok 3 Mini Beta",
1295      "cost_per_1m_in": 0.3,
1296      "cost_per_1m_out": 0.5,
1297      "cost_per_1m_in_cached": 0,
1298      "cost_per_1m_out_cached": 0,
1299      "context_window": 131072,
1300      "default_max_tokens": 8000,
1301      "can_reason": false,
1302      "supports_attachments": false,
1303      "options": {}
1304    },
1305    {
1306      "id": "xai/grok-3-mini-fast",
1307      "name": "Grok 3 Mini Fast Beta",
1308      "cost_per_1m_in": 0.6,
1309      "cost_per_1m_out": 4,
1310      "cost_per_1m_in_cached": 0,
1311      "cost_per_1m_out_cached": 0,
1312      "context_window": 131072,
1313      "default_max_tokens": 8000,
1314      "can_reason": false,
1315      "supports_attachments": false,
1316      "options": {}
1317    },
1318    {
1319      "id": "xai/grok-4",
1320      "name": "Grok 4",
1321      "cost_per_1m_in": 3,
1322      "cost_per_1m_out": 15,
1323      "cost_per_1m_in_cached": 0.75,
1324      "cost_per_1m_out_cached": 0,
1325      "context_window": 256000,
1326      "default_max_tokens": 8000,
1327      "can_reason": true,
1328      "reasoning_levels": [
1329        "low",
1330        "medium",
1331        "high"
1332      ],
1333      "default_reasoning_effort": "medium",
1334      "supports_attachments": true,
1335      "options": {}
1336    },
1337    {
1338      "id": "xai/grok-4-fast-non-reasoning",
1339      "name": "Grok 4 Fast Non-Reasoning",
1340      "cost_per_1m_in": 0.2,
1341      "cost_per_1m_out": 0.5,
1342      "cost_per_1m_in_cached": 0.05,
1343      "cost_per_1m_out_cached": 0,
1344      "context_window": 2000000,
1345      "default_max_tokens": 8000,
1346      "can_reason": false,
1347      "supports_attachments": false,
1348      "options": {}
1349    },
1350    {
1351      "id": "xai/grok-4-fast-reasoning",
1352      "name": "Grok 4 Fast Reasoning",
1353      "cost_per_1m_in": 0.2,
1354      "cost_per_1m_out": 0.5,
1355      "cost_per_1m_in_cached": 0.05,
1356      "cost_per_1m_out_cached": 0,
1357      "context_window": 2000000,
1358      "default_max_tokens": 8000,
1359      "can_reason": true,
1360      "reasoning_levels": [
1361        "low",
1362        "medium",
1363        "high"
1364      ],
1365      "default_reasoning_effort": "medium",
1366      "supports_attachments": false,
1367      "options": {}
1368    },
1369    {
1370      "id": "xai/grok-4.1-fast-non-reasoning",
1371      "name": "Grok 4.1 Fast Non-Reasoning",
1372      "cost_per_1m_in": 0.2,
1373      "cost_per_1m_out": 0.5,
1374      "cost_per_1m_in_cached": 0.05,
1375      "cost_per_1m_out_cached": 0,
1376      "context_window": 2000000,
1377      "default_max_tokens": 8000,
1378      "can_reason": false,
1379      "supports_attachments": false,
1380      "options": {}
1381    },
1382    {
1383      "id": "xai/grok-4.1-fast-reasoning",
1384      "name": "Grok 4.1 Fast Reasoning",
1385      "cost_per_1m_in": 0.2,
1386      "cost_per_1m_out": 0.5,
1387      "cost_per_1m_in_cached": 0.05,
1388      "cost_per_1m_out_cached": 0,
1389      "context_window": 2000000,
1390      "default_max_tokens": 8000,
1391      "can_reason": true,
1392      "reasoning_levels": [
1393        "low",
1394        "medium",
1395        "high"
1396      ],
1397      "default_reasoning_effort": "medium",
1398      "supports_attachments": false,
1399      "options": {}
1400    },
1401    {
1402      "id": "xai/grok-4.20-non-reasoning-beta",
1403      "name": "Grok 4.20 Beta Non-Reasoning",
1404      "cost_per_1m_in": 2,
1405      "cost_per_1m_out": 6,
1406      "cost_per_1m_in_cached": 0.2,
1407      "cost_per_1m_out_cached": 0,
1408      "context_window": 2000000,
1409      "default_max_tokens": 8000,
1410      "can_reason": false,
1411      "supports_attachments": true,
1412      "options": {}
1413    },
1414    {
1415      "id": "xai/grok-4.20-reasoning-beta",
1416      "name": "Grok 4.20 Beta Reasoning",
1417      "cost_per_1m_in": 2,
1418      "cost_per_1m_out": 6,
1419      "cost_per_1m_in_cached": 0.2,
1420      "cost_per_1m_out_cached": 0,
1421      "context_window": 2000000,
1422      "default_max_tokens": 8000,
1423      "can_reason": true,
1424      "reasoning_levels": [
1425        "low",
1426        "medium",
1427        "high"
1428      ],
1429      "default_reasoning_effort": "medium",
1430      "supports_attachments": true,
1431      "options": {}
1432    },
1433    {
1434      "id": "xai/grok-4.20-multi-agent-beta",
1435      "name": "Grok 4.20 Multi Agent Beta",
1436      "cost_per_1m_in": 2,
1437      "cost_per_1m_out": 6,
1438      "cost_per_1m_in_cached": 0.2,
1439      "cost_per_1m_out_cached": 0,
1440      "context_window": 2000000,
1441      "default_max_tokens": 8000,
1442      "can_reason": true,
1443      "reasoning_levels": [
1444        "low",
1445        "medium",
1446        "high"
1447      ],
1448      "default_reasoning_effort": "medium",
1449      "supports_attachments": false,
1450      "options": {}
1451    },
1452    {
1453      "id": "xai/grok-code-fast-1",
1454      "name": "Grok Code Fast 1",
1455      "cost_per_1m_in": 0.2,
1456      "cost_per_1m_out": 1.5,
1457      "cost_per_1m_in_cached": 0.02,
1458      "cost_per_1m_out_cached": 0,
1459      "context_window": 256000,
1460      "default_max_tokens": 8000,
1461      "can_reason": true,
1462      "reasoning_levels": [
1463        "low",
1464        "medium",
1465        "high"
1466      ],
1467      "default_reasoning_effort": "medium",
1468      "supports_attachments": false,
1469      "options": {}
1470    },
1471    {
1472      "id": "prime-intellect/intellect-3",
1473      "name": "INTELLECT 3",
1474      "cost_per_1m_in": 0.2,
1475      "cost_per_1m_out": 1.1,
1476      "cost_per_1m_in_cached": 0,
1477      "cost_per_1m_out_cached": 0,
1478      "context_window": 131072,
1479      "default_max_tokens": 8000,
1480      "can_reason": true,
1481      "reasoning_levels": [
1482        "low",
1483        "medium",
1484        "high"
1485      ],
1486      "default_reasoning_effort": "medium",
1487      "supports_attachments": false,
1488      "options": {}
1489    },
1490    {
1491      "id": "moonshotai/kimi-k2",
1492      "name": "Kimi K2",
1493      "cost_per_1m_in": 0.6,
1494      "cost_per_1m_out": 2.5,
1495      "cost_per_1m_in_cached": 0.15,
1496      "cost_per_1m_out_cached": 0,
1497      "context_window": 131072,
1498      "default_max_tokens": 8000,
1499      "can_reason": false,
1500      "supports_attachments": false,
1501      "options": {}
1502    },
1503    {
1504      "id": "moonshotai/kimi-k2-0905",
1505      "name": "Kimi K2 0905",
1506      "cost_per_1m_in": 0.6,
1507      "cost_per_1m_out": 2.5,
1508      "cost_per_1m_in_cached": 0,
1509      "cost_per_1m_out_cached": 0,
1510      "context_window": 256000,
1511      "default_max_tokens": 8000,
1512      "can_reason": false,
1513      "supports_attachments": false,
1514      "options": {}
1515    },
1516    {
1517      "id": "moonshotai/kimi-k2-thinking",
1518      "name": "Kimi K2 Thinking",
1519      "cost_per_1m_in": 0.6,
1520      "cost_per_1m_out": 2.5,
1521      "cost_per_1m_in_cached": 0.15,
1522      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1524      "default_max_tokens": 8000,
1525      "can_reason": true,
1526      "reasoning_levels": [
1527        "low",
1528        "medium",
1529        "high"
1530      ],
1531      "default_reasoning_effort": "medium",
1532      "supports_attachments": false,
1533      "options": {}
1534    },
1535    {
1536      "id": "moonshotai/kimi-k2-thinking-turbo",
1537      "name": "Kimi K2 Thinking Turbo",
1538      "cost_per_1m_in": 1.15,
1539      "cost_per_1m_out": 8,
1540      "cost_per_1m_in_cached": 0.15,
1541      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1543      "default_max_tokens": 8000,
1544      "can_reason": true,
1545      "reasoning_levels": [
1546        "low",
1547        "medium",
1548        "high"
1549      ],
1550      "default_reasoning_effort": "medium",
1551      "supports_attachments": false,
1552      "options": {}
1553    },
1554    {
1555      "id": "moonshotai/kimi-k2-turbo",
1556      "name": "Kimi K2 Turbo",
1557      "cost_per_1m_in": 2.4,
1558      "cost_per_1m_out": 10,
1559      "cost_per_1m_in_cached": 0,
1560      "cost_per_1m_out_cached": 0,
1561      "context_window": 256000,
1562      "default_max_tokens": 8000,
1563      "can_reason": false,
1564      "supports_attachments": false,
1565      "options": {}
1566    },
1567    {
1568      "id": "moonshotai/kimi-k2.5",
1569      "name": "Kimi K2.5",
1570      "cost_per_1m_in": 0.6,
1571      "cost_per_1m_out": 3,
1572      "cost_per_1m_in_cached": 0.1,
1573      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1575      "default_max_tokens": 8000,
1576      "can_reason": true,
1577      "reasoning_levels": [
1578        "low",
1579        "medium",
1580        "high"
1581      ],
1582      "default_reasoning_effort": "medium",
1583      "supports_attachments": true,
1584      "options": {}
1585    },
1586    {
1587      "id": "meta/llama-3.1-70b",
1588      "name": "Llama 3.1 70B Instruct",
1589      "cost_per_1m_in": 0.72,
1590      "cost_per_1m_out": 0.72,
1591      "cost_per_1m_in_cached": 0,
1592      "cost_per_1m_out_cached": 0,
1593      "context_window": 128000,
1594      "default_max_tokens": 8000,
1595      "can_reason": false,
1596      "supports_attachments": false,
1597      "options": {}
1598    },
1599    {
1600      "id": "meta/llama-3.1-8b",
1601      "name": "Llama 3.1 8B Instruct",
1602      "cost_per_1m_in": 0.1,
1603      "cost_per_1m_out": 0.1,
1604      "cost_per_1m_in_cached": 0,
1605      "cost_per_1m_out_cached": 0,
1606      "context_window": 128000,
1607      "default_max_tokens": 8000,
1608      "can_reason": false,
1609      "supports_attachments": false,
1610      "options": {}
1611    },
1612    {
1613      "id": "meta/llama-3.2-11b",
1614      "name": "Llama 3.2 11B Vision Instruct",
1615      "cost_per_1m_in": 0.16,
1616      "cost_per_1m_out": 0.16,
1617      "cost_per_1m_in_cached": 0,
1618      "cost_per_1m_out_cached": 0,
1619      "context_window": 128000,
1620      "default_max_tokens": 8000,
1621      "can_reason": false,
1622      "supports_attachments": true,
1623      "options": {}
1624    },
1625    {
1626      "id": "meta/llama-3.2-90b",
1627      "name": "Llama 3.2 90B Vision Instruct",
1628      "cost_per_1m_in": 0.72,
1629      "cost_per_1m_out": 0.72,
1630      "cost_per_1m_in_cached": 0,
1631      "cost_per_1m_out_cached": 0,
1632      "context_window": 128000,
1633      "default_max_tokens": 8000,
1634      "can_reason": false,
1635      "supports_attachments": true,
1636      "options": {}
1637    },
1638    {
1639      "id": "meta/llama-3.3-70b",
1640      "name": "Llama 3.3 70B Instruct",
1641      "cost_per_1m_in": 0.72,
1642      "cost_per_1m_out": 0.72,
1643      "cost_per_1m_in_cached": 0,
1644      "cost_per_1m_out_cached": 0,
1645      "context_window": 128000,
1646      "default_max_tokens": 8000,
1647      "can_reason": false,
1648      "supports_attachments": false,
1649      "options": {}
1650    },
1651    {
1652      "id": "meta/llama-4-maverick",
1653      "name": "Llama 4 Maverick 17B Instruct",
1654      "cost_per_1m_in": 0.24,
1655      "cost_per_1m_out": 0.97,
1656      "cost_per_1m_in_cached": 0,
1657      "cost_per_1m_out_cached": 0,
1658      "context_window": 128000,
1659      "default_max_tokens": 8000,
1660      "can_reason": false,
1661      "supports_attachments": true,
1662      "options": {}
1663    },
1664    {
1665      "id": "meta/llama-4-scout",
1666      "name": "Llama 4 Scout 17B Instruct",
1667      "cost_per_1m_in": 0.17,
1668      "cost_per_1m_out": 0.66,
1669      "cost_per_1m_in_cached": 0,
1670      "cost_per_1m_out_cached": 0,
1671      "context_window": 128000,
1672      "default_max_tokens": 8000,
1673      "can_reason": false,
1674      "supports_attachments": true,
1675      "options": {}
1676    },
1677    {
1678      "id": "meituan/longcat-flash-chat",
1679      "name": "LongCat Flash Chat",
1680      "cost_per_1m_in": 0,
1681      "cost_per_1m_out": 0,
1682      "cost_per_1m_in_cached": 0,
1683      "cost_per_1m_out_cached": 0,
1684      "context_window": 128000,
1685      "default_max_tokens": 8000,
1686      "can_reason": false,
1687      "supports_attachments": false,
1688      "options": {}
1689    },
1690    {
1691      "id": "meituan/longcat-flash-thinking",
1692      "name": "LongCat Flash Thinking",
1693      "cost_per_1m_in": 0.15,
1694      "cost_per_1m_out": 1.5,
1695      "cost_per_1m_in_cached": 0,
1696      "cost_per_1m_out_cached": 0,
1697      "context_window": 128000,
1698      "default_max_tokens": 8000,
1699      "can_reason": true,
1700      "reasoning_levels": [
1701        "low",
1702        "medium",
1703        "high"
1704      ],
1705      "default_reasoning_effort": "medium",
1706      "supports_attachments": false,
1707      "options": {}
1708    },
1709    {
1710      "id": "inception/mercury-2",
1711      "name": "Mercury 2",
1712      "cost_per_1m_in": 0.25,
1713      "cost_per_1m_out": 0.75,
1714      "cost_per_1m_in_cached": 0.025,
1715      "cost_per_1m_out_cached": 0,
1716      "context_window": 128000,
1717      "default_max_tokens": 8000,
1718      "can_reason": true,
1719      "reasoning_levels": [
1720        "low",
1721        "medium",
1722        "high"
1723      ],
1724      "default_reasoning_effort": "medium",
1725      "supports_attachments": false,
1726      "options": {}
1727    },
1728    {
1729      "id": "inception/mercury-coder-small",
1730      "name": "Mercury Coder Small Beta",
1731      "cost_per_1m_in": 0.25,
1732      "cost_per_1m_out": 1,
1733      "cost_per_1m_in_cached": 0,
1734      "cost_per_1m_out_cached": 0,
1735      "context_window": 32000,
1736      "default_max_tokens": 8000,
1737      "can_reason": false,
1738      "supports_attachments": false,
1739      "options": {}
1740    },
1741    {
1742      "id": "xiaomi/mimo-v2-flash",
1743      "name": "MiMo V2 Flash",
1744      "cost_per_1m_in": 0.1,
1745      "cost_per_1m_out": 0.3,
1746      "cost_per_1m_in_cached": 0.02,
1747      "cost_per_1m_out_cached": 0,
1748      "context_window": 262144,
1749      "default_max_tokens": 8000,
1750      "can_reason": true,
1751      "reasoning_levels": [
1752        "low",
1753        "medium",
1754        "high"
1755      ],
1756      "default_reasoning_effort": "medium",
1757      "supports_attachments": false,
1758      "options": {}
1759    },
1760    {
1761      "id": "minimax/minimax-m2",
1762      "name": "MiniMax M2",
1763      "cost_per_1m_in": 0.3,
1764      "cost_per_1m_out": 1.2,
1765      "cost_per_1m_in_cached": 0.03,
1766      "cost_per_1m_out_cached": 0.375,
1767      "context_window": 205000,
1768      "default_max_tokens": 8000,
1769      "can_reason": true,
1770      "reasoning_levels": [
1771        "low",
1772        "medium",
1773        "high"
1774      ],
1775      "default_reasoning_effort": "medium",
1776      "supports_attachments": false,
1777      "options": {}
1778    },
1779    {
1780      "id": "minimax/minimax-m2.1",
1781      "name": "MiniMax M2.1",
1782      "cost_per_1m_in": 0.3,
1783      "cost_per_1m_out": 1.2,
1784      "cost_per_1m_in_cached": 0.03,
1785      "cost_per_1m_out_cached": 0.375,
1786      "context_window": 204800,
1787      "default_max_tokens": 8000,
1788      "can_reason": true,
1789      "reasoning_levels": [
1790        "low",
1791        "medium",
1792        "high"
1793      ],
1794      "default_reasoning_effort": "medium",
1795      "supports_attachments": false,
1796      "options": {}
1797    },
1798    {
1799      "id": "minimax/minimax-m2.1-lightning",
1800      "name": "MiniMax M2.1 Lightning",
1801      "cost_per_1m_in": 0.3,
1802      "cost_per_1m_out": 2.4,
1803      "cost_per_1m_in_cached": 0.03,
1804      "cost_per_1m_out_cached": 0.375,
1805      "context_window": 204800,
1806      "default_max_tokens": 8000,
1807      "can_reason": true,
1808      "reasoning_levels": [
1809        "low",
1810        "medium",
1811        "high"
1812      ],
1813      "default_reasoning_effort": "medium",
1814      "supports_attachments": false,
1815      "options": {}
1816    },
1817    {
1818      "id": "minimax/minimax-m2.5",
1819      "name": "MiniMax M2.5",
1820      "cost_per_1m_in": 0.3,
1821      "cost_per_1m_out": 1.2,
1822      "cost_per_1m_in_cached": 0.03,
1823      "cost_per_1m_out_cached": 0.375,
1824      "context_window": 204800,
1825      "default_max_tokens": 8000,
1826      "can_reason": true,
1827      "reasoning_levels": [
1828        "low",
1829        "medium",
1830        "high"
1831      ],
1832      "default_reasoning_effort": "medium",
1833      "supports_attachments": false,
1834      "options": {}
1835    },
1836    {
1837      "id": "minimax/minimax-m2.5-highspeed",
1838      "name": "MiniMax M2.5 High Speed",
1839      "cost_per_1m_in": 0.6,
1840      "cost_per_1m_out": 2.4,
1841      "cost_per_1m_in_cached": 0.03,
1842      "cost_per_1m_out_cached": 0.375,
1843      "context_window": 204800,
1844      "default_max_tokens": 8000,
1845      "can_reason": true,
1846      "reasoning_levels": [
1847        "low",
1848        "medium",
1849        "high"
1850      ],
1851      "default_reasoning_effort": "medium",
1852      "supports_attachments": false,
1853      "options": {}
1854    },
1855    {
1856      "id": "mistral/ministral-3b",
1857      "name": "Ministral 3B",
1858      "cost_per_1m_in": 0.1,
1859      "cost_per_1m_out": 0.1,
1860      "cost_per_1m_in_cached": 0,
1861      "cost_per_1m_out_cached": 0,
1862      "context_window": 128000,
1863      "default_max_tokens": 4000,
1864      "can_reason": false,
1865      "supports_attachments": false,
1866      "options": {}
1867    },
1868    {
1869      "id": "mistral/ministral-8b",
1870      "name": "Ministral 8B",
1871      "cost_per_1m_in": 0.15,
1872      "cost_per_1m_out": 0.15,
1873      "cost_per_1m_in_cached": 0,
1874      "cost_per_1m_out_cached": 0,
1875      "context_window": 128000,
1876      "default_max_tokens": 4000,
1877      "can_reason": false,
1878      "supports_attachments": false,
1879      "options": {}
1880    },
1881    {
1882      "id": "mistral/codestral",
1883      "name": "Mistral Codestral",
1884      "cost_per_1m_in": 0.3,
1885      "cost_per_1m_out": 0.9,
1886      "cost_per_1m_in_cached": 0,
1887      "cost_per_1m_out_cached": 0,
1888      "context_window": 128000,
1889      "default_max_tokens": 4000,
1890      "can_reason": false,
1891      "supports_attachments": false,
1892      "options": {}
1893    },
1894    {
1895      "id": "mistral/mistral-medium",
1896      "name": "Mistral Medium 3.1",
1897      "cost_per_1m_in": 0.4,
1898      "cost_per_1m_out": 2,
1899      "cost_per_1m_in_cached": 0,
1900      "cost_per_1m_out_cached": 0,
1901      "context_window": 128000,
1902      "default_max_tokens": 8000,
1903      "can_reason": false,
1904      "supports_attachments": true,
1905      "options": {}
1906    },
1907    {
1908      "id": "mistral/mistral-small",
1909      "name": "Mistral Small",
1910      "cost_per_1m_in": 0.1,
1911      "cost_per_1m_out": 0.3,
1912      "cost_per_1m_in_cached": 0,
1913      "cost_per_1m_out_cached": 0,
1914      "context_window": 32000,
1915      "default_max_tokens": 4000,
1916      "can_reason": false,
1917      "supports_attachments": true,
1918      "options": {}
1919    },
1920    {
1921      "id": "nvidia/nemotron-nano-12b-v2-vl",
1922      "name": "Nvidia Nemotron Nano 12B V2 VL",
1923      "cost_per_1m_in": 0.2,
1924      "cost_per_1m_out": 0.6,
1925      "cost_per_1m_in_cached": 0,
1926      "cost_per_1m_out_cached": 0,
1927      "context_window": 131072,
1928      "default_max_tokens": 8000,
1929      "can_reason": true,
1930      "reasoning_levels": [
1931        "low",
1932        "medium",
1933        "high"
1934      ],
1935      "default_reasoning_effort": "medium",
1936      "supports_attachments": true,
1937      "options": {}
1938    },
1939    {
1940      "id": "nvidia/nemotron-nano-9b-v2",
1941      "name": "Nvidia Nemotron Nano 9B V2",
1942      "cost_per_1m_in": 0.06,
1943      "cost_per_1m_out": 0.23,
1944      "cost_per_1m_in_cached": 0,
1945      "cost_per_1m_out_cached": 0,
1946      "context_window": 131072,
1947      "default_max_tokens": 8000,
1948      "can_reason": true,
1949      "reasoning_levels": [
1950        "low",
1951        "medium",
1952        "high"
1953      ],
1954      "default_reasoning_effort": "medium",
1955      "supports_attachments": false,
1956      "options": {}
1957    },
1958    {
1959      "id": "mistral/pixtral-12b",
1960      "name": "Pixtral 12B 2409",
1961      "cost_per_1m_in": 0.15,
1962      "cost_per_1m_out": 0.15,
1963      "cost_per_1m_in_cached": 0,
1964      "cost_per_1m_out_cached": 0,
1965      "context_window": 128000,
1966      "default_max_tokens": 4000,
1967      "can_reason": false,
1968      "supports_attachments": true,
1969      "options": {}
1970    },
1971    {
1972      "id": "mistral/pixtral-large",
1973      "name": "Pixtral Large",
1974      "cost_per_1m_in": 2,
1975      "cost_per_1m_out": 6,
1976      "cost_per_1m_in_cached": 0,
1977      "cost_per_1m_out_cached": 0,
1978      "context_window": 128000,
1979      "default_max_tokens": 4000,
1980      "can_reason": false,
1981      "supports_attachments": true,
1982      "options": {}
1983    },
1984    {
1985      "id": "alibaba/qwen-3-32b",
1986      "name": "Qwen 3 32B",
1987      "cost_per_1m_in": 0.29,
1988      "cost_per_1m_out": 0.59,
1989      "cost_per_1m_in_cached": 0,
1990      "cost_per_1m_out_cached": 0,
1991      "context_window": 131072,
1992      "default_max_tokens": 8000,
1993      "can_reason": true,
1994      "reasoning_levels": [
1995        "low",
1996        "medium",
1997        "high"
1998      ],
1999      "default_reasoning_effort": "medium",
2000      "supports_attachments": false,
2001      "options": {}
2002    },
2003    {
2004      "id": "alibaba/qwen3-coder-30b-a3b",
2005      "name": "Qwen 3 Coder 30B A3B Instruct",
2006      "cost_per_1m_in": 0.15,
2007      "cost_per_1m_out": 0.6,
2008      "cost_per_1m_in_cached": 0,
2009      "cost_per_1m_out_cached": 0,
2010      "context_window": 262144,
2011      "default_max_tokens": 8000,
2012      "can_reason": true,
2013      "reasoning_levels": [
2014        "low",
2015        "medium",
2016        "high"
2017      ],
2018      "default_reasoning_effort": "medium",
2019      "supports_attachments": false,
2020      "options": {}
2021    },
2022    {
2023      "id": "alibaba/qwen3-max-thinking",
2024      "name": "Qwen 3 Max Thinking",
2025      "cost_per_1m_in": 1.2,
2026      "cost_per_1m_out": 6,
2027      "cost_per_1m_in_cached": 0.24,
2028      "cost_per_1m_out_cached": 0,
2029      "context_window": 256000,
2030      "default_max_tokens": 8000,
2031      "can_reason": true,
2032      "reasoning_levels": [
2033        "low",
2034        "medium",
2035        "high"
2036      ],
2037      "default_reasoning_effort": "medium",
2038      "supports_attachments": false,
2039      "options": {}
2040    },
2041    {
2042      "id": "alibaba/qwen3.5-flash",
2043      "name": "Qwen 3.5 Flash",
2044      "cost_per_1m_in": 0.1,
2045      "cost_per_1m_out": 0.4,
2046      "cost_per_1m_in_cached": 0.001,
2047      "cost_per_1m_out_cached": 0.125,
2048      "context_window": 1000000,
2049      "default_max_tokens": 8000,
2050      "can_reason": true,
2051      "reasoning_levels": [
2052        "low",
2053        "medium",
2054        "high"
2055      ],
2056      "default_reasoning_effort": "medium",
2057      "supports_attachments": true,
2058      "options": {}
2059    },
2060    {
2061      "id": "alibaba/qwen3.5-plus",
2062      "name": "Qwen 3.5 Plus",
2063      "cost_per_1m_in": 0.4,
2064      "cost_per_1m_out": 2.4,
2065      "cost_per_1m_in_cached": 0.04,
2066      "cost_per_1m_out_cached": 0.5,
2067      "context_window": 1000000,
2068      "default_max_tokens": 8000,
2069      "can_reason": true,
2070      "reasoning_levels": [
2071        "low",
2072        "medium",
2073        "high"
2074      ],
2075      "default_reasoning_effort": "medium",
2076      "supports_attachments": true,
2077      "options": {}
2078    },
2079    {
2080      "id": "alibaba/qwen3-235b-a22b-thinking",
2081      "name": "Qwen3 235B A22B Thinking 2507",
2082      "cost_per_1m_in": 0.3,
2083      "cost_per_1m_out": 2.9,
2084      "cost_per_1m_in_cached": 0,
2085      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
2087      "default_max_tokens": 8000,
2088      "can_reason": true,
2089      "reasoning_levels": [
2090        "low",
2091        "medium",
2092        "high"
2093      ],
2094      "default_reasoning_effort": "medium",
2095      "supports_attachments": true,
2096      "options": {}
2097    },
2098    {
2099      "id": "alibaba/qwen3-coder",
2100      "name": "Qwen3 Coder 480B A35B Instruct",
2101      "cost_per_1m_in": 0.4,
2102      "cost_per_1m_out": 1.6,
2103      "cost_per_1m_in_cached": 0,
2104      "cost_per_1m_out_cached": 0,
2105      "context_window": 262144,
2106      "default_max_tokens": 8000,
2107      "can_reason": false,
2108      "supports_attachments": false,
2109      "options": {}
2110    },
2111    {
2112      "id": "alibaba/qwen3-coder-next",
2113      "name": "Qwen3 Coder Next",
2114      "cost_per_1m_in": 0.5,
2115      "cost_per_1m_out": 1.2,
2116      "cost_per_1m_in_cached": 0,
2117      "cost_per_1m_out_cached": 0,
2118      "context_window": 256000,
2119      "default_max_tokens": 8000,
2120      "can_reason": false,
2121      "supports_attachments": false,
2122      "options": {}
2123    },
2124    {
2125      "id": "alibaba/qwen3-coder-plus",
2126      "name": "Qwen3 Coder Plus",
2127      "cost_per_1m_in": 1,
2128      "cost_per_1m_out": 5,
2129      "cost_per_1m_in_cached": 0.2,
2130      "cost_per_1m_out_cached": 0,
2131      "context_window": 1000000,
2132      "default_max_tokens": 8000,
2133      "can_reason": false,
2134      "supports_attachments": false,
2135      "options": {}
2136    },
2137    {
2138      "id": "alibaba/qwen3-max",
2139      "name": "Qwen3 Max",
2140      "cost_per_1m_in": 1.2,
2141      "cost_per_1m_out": 6,
2142      "cost_per_1m_in_cached": 0.24,
2143      "cost_per_1m_out_cached": 0,
2144      "context_window": 262144,
2145      "default_max_tokens": 8000,
2146      "can_reason": false,
2147      "supports_attachments": false,
2148      "options": {}
2149    },
2150    {
2151      "id": "alibaba/qwen3-max-preview",
2152      "name": "Qwen3 Max Preview",
2153      "cost_per_1m_in": 1.2,
2154      "cost_per_1m_out": 6,
2155      "cost_per_1m_in_cached": 0.24,
2156      "cost_per_1m_out_cached": 0,
2157      "context_window": 262144,
2158      "default_max_tokens": 8000,
2159      "can_reason": false,
2160      "supports_attachments": false,
2161      "options": {}
2162    },
2163    {
2164      "id": "alibaba/qwen3-vl-thinking",
2165      "name": "Qwen3 VL 235B A22B Thinking",
2166      "cost_per_1m_in": 0.22,
2167      "cost_per_1m_out": 0.88,
2168      "cost_per_1m_in_cached": 0,
2169      "cost_per_1m_out_cached": 0,
2170      "context_window": 256000,
2171      "default_max_tokens": 8000,
2172      "can_reason": true,
2173      "reasoning_levels": [
2174        "low",
2175        "medium",
2176        "high"
2177      ],
2178      "default_reasoning_effort": "medium",
2179      "supports_attachments": true,
2180      "options": {}
2181    },
2182    {
2183      "id": "alibaba/qwen-3-14b",
2184      "name": "Qwen3-14B",
2185      "cost_per_1m_in": 0.06,
2186      "cost_per_1m_out": 0.24,
2187      "cost_per_1m_in_cached": 0,
2188      "cost_per_1m_out_cached": 0,
2189      "context_window": 40960,
2190      "default_max_tokens": 8000,
2191      "can_reason": true,
2192      "reasoning_levels": [
2193        "low",
2194        "medium",
2195        "high"
2196      ],
2197      "default_reasoning_effort": "medium",
2198      "supports_attachments": false,
2199      "options": {}
2200    },
2201    {
2202      "id": "alibaba/qwen-3-235b",
2203      "name": "Qwen3-235B-A22B",
2204      "cost_per_1m_in": 0.071,
2205      "cost_per_1m_out": 0.463,
2206      "cost_per_1m_in_cached": 0,
2207      "cost_per_1m_out_cached": 0,
2208      "context_window": 40960,
2209      "default_max_tokens": 8000,
2210      "can_reason": false,
2211      "supports_attachments": false,
2212      "options": {}
2213    },
2214    {
2215      "id": "alibaba/qwen-3-30b",
2216      "name": "Qwen3-30B-A3B",
2217      "cost_per_1m_in": 0.08,
2218      "cost_per_1m_out": 0.29,
2219      "cost_per_1m_in_cached": 0,
2220      "cost_per_1m_out_cached": 0,
2221      "context_window": 40960,
2222      "default_max_tokens": 8000,
2223      "can_reason": true,
2224      "reasoning_levels": [
2225        "low",
2226        "medium",
2227        "high"
2228      ],
2229      "default_reasoning_effort": "medium",
2230      "supports_attachments": false,
2231      "options": {}
2232    },
2233    {
2234      "id": "bytedance/seed-1.6",
2235      "name": "Seed 1.6",
2236      "cost_per_1m_in": 0.25,
2237      "cost_per_1m_out": 2,
2238      "cost_per_1m_in_cached": 0.05,
2239      "cost_per_1m_out_cached": 0,
2240      "context_window": 256000,
2241      "default_max_tokens": 8000,
2242      "can_reason": true,
2243      "reasoning_levels": [
2244        "low",
2245        "medium",
2246        "high"
2247      ],
2248      "default_reasoning_effort": "medium",
2249      "supports_attachments": false,
2250      "options": {}
2251    },
2252    {
2253      "id": "perplexity/sonar",
2254      "name": "Sonar",
2255      "cost_per_1m_in": 1,
2256      "cost_per_1m_out": 1,
2257      "cost_per_1m_in_cached": 0,
2258      "cost_per_1m_out_cached": 0,
2259      "context_window": 127000,
2260      "default_max_tokens": 8000,
2261      "can_reason": false,
2262      "supports_attachments": true,
2263      "options": {}
2264    },
2265    {
2266      "id": "perplexity/sonar-pro",
2267      "name": "Sonar Pro",
2268      "cost_per_1m_in": 3,
2269      "cost_per_1m_out": 15,
2270      "cost_per_1m_in_cached": 0,
2271      "cost_per_1m_out_cached": 0,
2272      "context_window": 200000,
2273      "default_max_tokens": 8000,
2274      "can_reason": false,
2275      "supports_attachments": true,
2276      "options": {}
2277    },
2278    {
2279      "id": "arcee-ai/trinity-large-preview",
2280      "name": "Trinity Large Preview",
2281      "cost_per_1m_in": 0.25,
2282      "cost_per_1m_out": 1,
2283      "cost_per_1m_in_cached": 0,
2284      "cost_per_1m_out_cached": 0,
2285      "context_window": 131000,
2286      "default_max_tokens": 8000,
2287      "can_reason": false,
2288      "supports_attachments": false,
2289      "options": {}
2290    },
2291    {
2292      "id": "openai/gpt-oss-20b",
2293      "name": "gpt-oss-20b",
2294      "cost_per_1m_in": 0.07,
2295      "cost_per_1m_out": 0.3,
2296      "cost_per_1m_in_cached": 0,
2297      "cost_per_1m_out_cached": 0,
2298      "context_window": 128000,
2299      "default_max_tokens": 8000,
2300      "can_reason": true,
2301      "reasoning_levels": [
2302        "low",
2303        "medium",
2304        "high"
2305      ],
2306      "default_reasoning_effort": "medium",
2307      "supports_attachments": false,
2308      "options": {}
2309    },
2310    {
2311      "id": "openai/gpt-oss-safeguard-20b",
2312      "name": "gpt-oss-safeguard-20b",
2313      "cost_per_1m_in": 0.075,
2314      "cost_per_1m_out": 0.3,
2315      "cost_per_1m_in_cached": 0.037,
2316      "cost_per_1m_out_cached": 0,
2317      "context_window": 131072,
2318      "default_max_tokens": 8000,
2319      "can_reason": true,
2320      "reasoning_levels": [
2321        "low",
2322        "medium",
2323        "high"
2324      ],
2325      "default_reasoning_effort": "medium",
2326      "supports_attachments": false,
2327      "options": {}
2328    },
2329    {
2330      "id": "openai/o1",
2331      "name": "o1",
2332      "cost_per_1m_in": 15,
2333      "cost_per_1m_out": 60,
2334      "cost_per_1m_in_cached": 7.5,
2335      "cost_per_1m_out_cached": 0,
2336      "context_window": 200000,
2337      "default_max_tokens": 8000,
2338      "can_reason": true,
2339      "reasoning_levels": [
2340        "low",
2341        "medium",
2342        "high"
2343      ],
2344      "default_reasoning_effort": "medium",
2345      "supports_attachments": true,
2346      "options": {}
2347    },
2348    {
2349      "id": "openai/o3",
2350      "name": "o3",
2351      "cost_per_1m_in": 2,
2352      "cost_per_1m_out": 8,
2353      "cost_per_1m_in_cached": 0.5,
2354      "cost_per_1m_out_cached": 0,
2355      "context_window": 200000,
2356      "default_max_tokens": 8000,
2357      "can_reason": true,
2358      "reasoning_levels": [
2359        "low",
2360        "medium",
2361        "high"
2362      ],
2363      "default_reasoning_effort": "medium",
2364      "supports_attachments": true,
2365      "options": {}
2366    },
2367    {
2368      "id": "openai/o3-pro",
2369      "name": "o3 Pro",
2370      "cost_per_1m_in": 20,
2371      "cost_per_1m_out": 80,
2372      "cost_per_1m_in_cached": 0,
2373      "cost_per_1m_out_cached": 0,
2374      "context_window": 200000,
2375      "default_max_tokens": 8000,
2376      "can_reason": true,
2377      "reasoning_levels": [
2378        "low",
2379        "medium",
2380        "high"
2381      ],
2382      "default_reasoning_effort": "medium",
2383      "supports_attachments": true,
2384      "options": {}
2385    },
2386    {
2387      "id": "openai/o3-deep-research",
2388      "name": "o3-deep-research",
2389      "cost_per_1m_in": 10,
2390      "cost_per_1m_out": 40,
2391      "cost_per_1m_in_cached": 2.5,
2392      "cost_per_1m_out_cached": 0,
2393      "context_window": 200000,
2394      "default_max_tokens": 8000,
2395      "can_reason": true,
2396      "reasoning_levels": [
2397        "low",
2398        "medium",
2399        "high"
2400      ],
2401      "default_reasoning_effort": "medium",
2402      "supports_attachments": true,
2403      "options": {}
2404    },
2405    {
2406      "id": "openai/o3-mini",
2407      "name": "o3-mini",
2408      "cost_per_1m_in": 1.1,
2409      "cost_per_1m_out": 4.4,
2410      "cost_per_1m_in_cached": 0.55,
2411      "cost_per_1m_out_cached": 0,
2412      "context_window": 200000,
2413      "default_max_tokens": 8000,
2414      "can_reason": true,
2415      "reasoning_levels": [
2416        "low",
2417        "medium",
2418        "high"
2419      ],
2420      "default_reasoning_effort": "medium",
2421      "supports_attachments": false,
2422      "options": {}
2423    },
2424    {
2425      "id": "openai/o4-mini",
2426      "name": "o4-mini",
2427      "cost_per_1m_in": 1.1,
2428      "cost_per_1m_out": 4.4,
2429      "cost_per_1m_in_cached": 0.275,
2430      "cost_per_1m_out_cached": 0,
2431      "context_window": 200000,
2432      "default_max_tokens": 8000,
2433      "can_reason": true,
2434      "reasoning_levels": [
2435        "low",
2436        "medium",
2437        "high"
2438      ],
2439      "default_reasoning_effort": "medium",
2440      "supports_attachments": true,
2441      "options": {}
2442    },
2443    {
2444      "id": "vercel/v0-1.0-md",
2445      "name": "v0-1.0-md",
2446      "cost_per_1m_in": 3,
2447      "cost_per_1m_out": 15,
2448      "cost_per_1m_in_cached": 0,
2449      "cost_per_1m_out_cached": 0,
2450      "context_window": 128000,
2451      "default_max_tokens": 8000,
2452      "can_reason": false,
2453      "supports_attachments": true,
2454      "options": {}
2455    },
2456    {
2457      "id": "vercel/v0-1.5-md",
2458      "name": "v0-1.5-md",
2459      "cost_per_1m_in": 3,
2460      "cost_per_1m_out": 15,
2461      "cost_per_1m_in_cached": 0,
2462      "cost_per_1m_out_cached": 0,
2463      "context_window": 128000,
2464      "default_max_tokens": 8000,
2465      "can_reason": false,
2466      "supports_attachments": true,
2467      "options": {}
2468    }
2469  ],
2470  "default_headers": {
2471    "HTTP-Referer": "https://charm.land",
2472    "X-Title": "Crush"
2473  }
2474}