vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "vercel",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true,
  21      "options": {}
  22    },
  23    {
  24      "id": "anthropic/claude-3.5-haiku",
  25      "name": "Claude 3.5 Haiku",
  26      "cost_per_1m_in": 0.8,
  27      "cost_per_1m_out": 4,
  28      "cost_per_1m_in_cached": 0.08,
  29      "cost_per_1m_out_cached": 1,
  30      "context_window": 200000,
  31      "default_max_tokens": 8000,
  32      "can_reason": false,
  33      "supports_attachments": true,
  34      "options": {}
  35    },
  36    {
  37      "id": "anthropic/claude-3.5-sonnet",
  38      "name": "Claude 3.5 Sonnet",
  39      "cost_per_1m_in": 3,
  40      "cost_per_1m_out": 15,
  41      "cost_per_1m_in_cached": 0.3,
  42      "cost_per_1m_out_cached": 3.75,
  43      "context_window": 200000,
  44      "default_max_tokens": 8000,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "anthropic/claude-3.5-sonnet-20240620",
  51      "name": "Claude 3.5 Sonnet (2024-06-20)",
  52      "cost_per_1m_in": 3,
  53      "cost_per_1m_out": 15,
  54      "cost_per_1m_in_cached": 0.3,
  55      "cost_per_1m_out_cached": 3.75,
  56      "context_window": 200000,
  57      "default_max_tokens": 8000,
  58      "can_reason": false,
  59      "supports_attachments": true,
  60      "options": {}
  61    },
  62    {
  63      "id": "anthropic/claude-3.7-sonnet",
  64      "name": "Claude 3.7 Sonnet",
  65      "cost_per_1m_in": 3,
  66      "cost_per_1m_out": 15,
  67      "cost_per_1m_in_cached": 0.3,
  68      "cost_per_1m_out_cached": 3.75,
  69      "context_window": 200000,
  70      "default_max_tokens": 8000,
  71      "can_reason": true,
  72      "reasoning_levels": [
  73        "none",
  74        "minimal",
  75        "low",
  76        "medium",
  77        "high",
  78        "xhigh"
  79      ],
  80      "default_reasoning_effort": "medium",
  81      "supports_attachments": true,
  82      "options": {}
  83    },
  84    {
  85      "id": "anthropic/claude-haiku-4.5",
  86      "name": "Claude Haiku 4.5",
  87      "cost_per_1m_in": 1,
  88      "cost_per_1m_out": 5,
  89      "cost_per_1m_in_cached": 0.1,
  90      "cost_per_1m_out_cached": 1.25,
  91      "context_window": 200000,
  92      "default_max_tokens": 8000,
  93      "can_reason": true,
  94      "reasoning_levels": [
  95        "none",
  96        "minimal",
  97        "low",
  98        "medium",
  99        "high",
 100        "xhigh"
 101      ],
 102      "default_reasoning_effort": "medium",
 103      "supports_attachments": true,
 104      "options": {}
 105    },
 106    {
 107      "id": "anthropic/claude-opus-4",
 108      "name": "Claude Opus 4",
 109      "cost_per_1m_in": 15,
 110      "cost_per_1m_out": 75,
 111      "cost_per_1m_in_cached": 1.5,
 112      "cost_per_1m_out_cached": 18.75,
 113      "context_window": 200000,
 114      "default_max_tokens": 8000,
 115      "can_reason": true,
 116      "reasoning_levels": [
 117        "none",
 118        "minimal",
 119        "low",
 120        "medium",
 121        "high",
 122        "xhigh"
 123      ],
 124      "default_reasoning_effort": "medium",
 125      "supports_attachments": true,
 126      "options": {}
 127    },
 128    {
 129      "id": "anthropic/claude-opus-4.1",
 130      "name": "Claude Opus 4.1",
 131      "cost_per_1m_in": 15,
 132      "cost_per_1m_out": 75,
 133      "cost_per_1m_in_cached": 1.5,
 134      "cost_per_1m_out_cached": 18.75,
 135      "context_window": 200000,
 136      "default_max_tokens": 8000,
 137      "can_reason": true,
 138      "reasoning_levels": [
 139        "none",
 140        "minimal",
 141        "low",
 142        "medium",
 143        "high",
 144        "xhigh"
 145      ],
 146      "default_reasoning_effort": "medium",
 147      "supports_attachments": true,
 148      "options": {}
 149    },
 150    {
 151      "id": "anthropic/claude-opus-4.5",
 152      "name": "Claude Opus 4.5",
 153      "cost_per_1m_in": 5,
 154      "cost_per_1m_out": 25,
 155      "cost_per_1m_in_cached": 0.5,
 156      "cost_per_1m_out_cached": 6.25,
 157      "context_window": 200000,
 158      "default_max_tokens": 8000,
 159      "can_reason": true,
 160      "reasoning_levels": [
 161        "none",
 162        "minimal",
 163        "low",
 164        "medium",
 165        "high",
 166        "xhigh"
 167      ],
 168      "default_reasoning_effort": "medium",
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-opus-4.6",
 174      "name": "Claude Opus 4.6",
 175      "cost_per_1m_in": 5,
 176      "cost_per_1m_out": 25,
 177      "cost_per_1m_in_cached": 0.5,
 178      "cost_per_1m_out_cached": 6.25,
 179      "context_window": 1000000,
 180      "default_max_tokens": 8000,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "none",
 184        "minimal",
 185        "low",
 186        "medium",
 187        "high",
 188        "xhigh"
 189      ],
 190      "default_reasoning_effort": "medium",
 191      "supports_attachments": true,
 192      "options": {}
 193    },
 194    {
 195      "id": "anthropic/claude-sonnet-4",
 196      "name": "Claude Sonnet 4",
 197      "cost_per_1m_in": 3,
 198      "cost_per_1m_out": 15,
 199      "cost_per_1m_in_cached": 0.3,
 200      "cost_per_1m_out_cached": 3.75,
 201      "context_window": 1000000,
 202      "default_max_tokens": 8000,
 203      "can_reason": true,
 204      "reasoning_levels": [
 205        "none",
 206        "minimal",
 207        "low",
 208        "medium",
 209        "high",
 210        "xhigh"
 211      ],
 212      "default_reasoning_effort": "medium",
 213      "supports_attachments": true,
 214      "options": {}
 215    },
 216    {
 217      "id": "anthropic/claude-sonnet-4.5",
 218      "name": "Claude Sonnet 4.5",
 219      "cost_per_1m_in": 3,
 220      "cost_per_1m_out": 15,
 221      "cost_per_1m_in_cached": 0.3,
 222      "cost_per_1m_out_cached": 3.75,
 223      "context_window": 1000000,
 224      "default_max_tokens": 8000,
 225      "can_reason": true,
 226      "reasoning_levels": [
 227        "none",
 228        "minimal",
 229        "low",
 230        "medium",
 231        "high",
 232        "xhigh"
 233      ],
 234      "default_reasoning_effort": "medium",
 235      "supports_attachments": true,
 236      "options": {}
 237    },
 238    {
 239      "id": "anthropic/claude-sonnet-4.6",
 240      "name": "Claude Sonnet 4.6",
 241      "cost_per_1m_in": 3,
 242      "cost_per_1m_out": 15,
 243      "cost_per_1m_in_cached": 0.3,
 244      "cost_per_1m_out_cached": 3.75,
 245      "context_window": 1000000,
 246      "default_max_tokens": 8000,
 247      "can_reason": true,
 248      "reasoning_levels": [
 249        "none",
 250        "minimal",
 251        "low",
 252        "medium",
 253        "high",
 254        "xhigh"
 255      ],
 256      "default_reasoning_effort": "medium",
 257      "supports_attachments": true,
 258      "options": {}
 259    },
 260    {
 261      "id": "cohere/command-a",
 262      "name": "Command A",
 263      "cost_per_1m_in": 2.5,
 264      "cost_per_1m_out": 10,
 265      "cost_per_1m_in_cached": 0,
 266      "cost_per_1m_out_cached": 0,
 267      "context_window": 256000,
 268      "default_max_tokens": 8000,
 269      "can_reason": false,
 270      "supports_attachments": false,
 271      "options": {}
 272    },
 273    {
 274      "id": "deepseek/deepseek-v3",
 275      "name": "DeepSeek V3 0324",
 276      "cost_per_1m_in": 0.77,
 277      "cost_per_1m_out": 0.77,
 278      "cost_per_1m_in_cached": 0,
 279      "cost_per_1m_out_cached": 0,
 280      "context_window": 163840,
 281      "default_max_tokens": 8000,
 282      "can_reason": false,
 283      "supports_attachments": false,
 284      "options": {}
 285    },
 286    {
 287      "id": "deepseek/deepseek-v3.1-terminus",
 288      "name": "DeepSeek V3.1 Terminus",
 289      "cost_per_1m_in": 0.27,
 290      "cost_per_1m_out": 1,
 291      "cost_per_1m_in_cached": 0,
 292      "cost_per_1m_out_cached": 0,
 293      "context_window": 131072,
 294      "default_max_tokens": 8000,
 295      "can_reason": true,
 296      "reasoning_levels": [
 297        "low",
 298        "medium",
 299        "high"
 300      ],
 301      "default_reasoning_effort": "medium",
 302      "supports_attachments": false,
 303      "options": {}
 304    },
 305    {
 306      "id": "deepseek/deepseek-v3.2",
 307      "name": "DeepSeek V3.2",
 308      "cost_per_1m_in": 0.26,
 309      "cost_per_1m_out": 0.38,
 310      "cost_per_1m_in_cached": 0.13,
 311      "cost_per_1m_out_cached": 0,
 312      "context_window": 128000,
 313      "default_max_tokens": 8000,
 314      "can_reason": false,
 315      "supports_attachments": false,
 316      "options": {}
 317    },
 318    {
 319      "id": "deepseek/deepseek-v3.2-thinking",
 320      "name": "DeepSeek V3.2 Thinking",
 321      "cost_per_1m_in": 0.28,
 322      "cost_per_1m_out": 0.42,
 323      "cost_per_1m_in_cached": 0.028,
 324      "cost_per_1m_out_cached": 0,
 325      "context_window": 128000,
 326      "default_max_tokens": 8000,
 327      "can_reason": true,
 328      "reasoning_levels": [
 329        "low",
 330        "medium",
 331        "high"
 332      ],
 333      "default_reasoning_effort": "medium",
 334      "supports_attachments": false,
 335      "options": {}
 336    },
 337    {
 338      "id": "deepseek/deepseek-r1",
 339      "name": "DeepSeek-R1",
 340      "cost_per_1m_in": 1.35,
 341      "cost_per_1m_out": 5.4,
 342      "cost_per_1m_in_cached": 0,
 343      "cost_per_1m_out_cached": 0,
 344      "context_window": 128000,
 345      "default_max_tokens": 8000,
 346      "can_reason": true,
 347      "reasoning_levels": [
 348        "low",
 349        "medium",
 350        "high"
 351      ],
 352      "default_reasoning_effort": "medium",
 353      "supports_attachments": false,
 354      "options": {}
 355    },
 356    {
 357      "id": "deepseek/deepseek-v3.1",
 358      "name": "DeepSeek-V3.1",
 359      "cost_per_1m_in": 0.5,
 360      "cost_per_1m_out": 1.5,
 361      "cost_per_1m_in_cached": 0,
 362      "cost_per_1m_out_cached": 0,
 363      "context_window": 163840,
 364      "default_max_tokens": 8000,
 365      "can_reason": true,
 366      "reasoning_levels": [
 367        "low",
 368        "medium",
 369        "high"
 370      ],
 371      "default_reasoning_effort": "medium",
 372      "supports_attachments": false,
 373      "options": {}
 374    },
 375    {
 376      "id": "mistral/devstral-2",
 377      "name": "Devstral 2",
 378      "cost_per_1m_in": 0,
 379      "cost_per_1m_out": 0,
 380      "cost_per_1m_in_cached": 0,
 381      "cost_per_1m_out_cached": 0,
 382      "context_window": 256000,
 383      "default_max_tokens": 8000,
 384      "can_reason": false,
 385      "supports_attachments": false,
 386      "options": {}
 387    },
 388    {
 389      "id": "mistral/devstral-small",
 390      "name": "Devstral Small 1.1",
 391      "cost_per_1m_in": 0.1,
 392      "cost_per_1m_out": 0.3,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 128000,
 396      "default_max_tokens": 8000,
 397      "can_reason": false,
 398      "supports_attachments": false,
 399      "options": {}
 400    },
 401    {
 402      "id": "mistral/devstral-small-2",
 403      "name": "Devstral Small 2",
 404      "cost_per_1m_in": 0,
 405      "cost_per_1m_out": 0,
 406      "cost_per_1m_in_cached": 0,
 407      "cost_per_1m_out_cached": 0,
 408      "context_window": 256000,
 409      "default_max_tokens": 8000,
 410      "can_reason": false,
 411      "supports_attachments": false,
 412      "options": {}
 413    },
 414    {
 415      "id": "zai/glm-4.5-air",
 416      "name": "GLM 4.5 Air",
 417      "cost_per_1m_in": 0.2,
 418      "cost_per_1m_out": 1.1,
 419      "cost_per_1m_in_cached": 0.03,
 420      "cost_per_1m_out_cached": 0,
 421      "context_window": 128000,
 422      "default_max_tokens": 8000,
 423      "can_reason": true,
 424      "reasoning_levels": [
 425        "low",
 426        "medium",
 427        "high"
 428      ],
 429      "default_reasoning_effort": "medium",
 430      "supports_attachments": false,
 431      "options": {}
 432    },
 433    {
 434      "id": "zai/glm-4.5v",
 435      "name": "GLM 4.5V",
 436      "cost_per_1m_in": 0.6,
 437      "cost_per_1m_out": 1.8,
 438      "cost_per_1m_in_cached": 0.11,
 439      "cost_per_1m_out_cached": 0,
 440      "context_window": 66000,
 441      "default_max_tokens": 8000,
 442      "can_reason": false,
 443      "supports_attachments": true,
 444      "options": {}
 445    },
 446    {
 447      "id": "zai/glm-4.6",
 448      "name": "GLM 4.6",
 449      "cost_per_1m_in": 0.45,
 450      "cost_per_1m_out": 1.8,
 451      "cost_per_1m_in_cached": 0.11,
 452      "cost_per_1m_out_cached": 0,
 453      "context_window": 200000,
 454      "default_max_tokens": 8000,
 455      "can_reason": true,
 456      "reasoning_levels": [
 457        "low",
 458        "medium",
 459        "high"
 460      ],
 461      "default_reasoning_effort": "medium",
 462      "supports_attachments": false,
 463      "options": {}
 464    },
 465    {
 466      "id": "zai/glm-4.7",
 467      "name": "GLM 4.7",
 468      "cost_per_1m_in": 0.6,
 469      "cost_per_1m_out": 2.2,
 470      "cost_per_1m_in_cached": 0,
 471      "cost_per_1m_out_cached": 0,
 472      "context_window": 200000,
 473      "default_max_tokens": 8000,
 474      "can_reason": true,
 475      "reasoning_levels": [
 476        "low",
 477        "medium",
 478        "high"
 479      ],
 480      "default_reasoning_effort": "medium",
 481      "supports_attachments": false,
 482      "options": {}
 483    },
 484    {
 485      "id": "zai/glm-4.7-flash",
 486      "name": "GLM 4.7 Flash",
 487      "cost_per_1m_in": 0.07,
 488      "cost_per_1m_out": 0.4,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 200000,
 492      "default_max_tokens": 8000,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": false,
 501      "options": {}
 502    },
 503    {
 504      "id": "zai/glm-4.7-flashx",
 505      "name": "GLM 4.7 FlashX",
 506      "cost_per_1m_in": 0.06,
 507      "cost_per_1m_out": 0.4,
 508      "cost_per_1m_in_cached": 0.01,
 509      "cost_per_1m_out_cached": 0,
 510      "context_window": 200000,
 511      "default_max_tokens": 8000,
 512      "can_reason": true,
 513      "reasoning_levels": [
 514        "low",
 515        "medium",
 516        "high"
 517      ],
 518      "default_reasoning_effort": "medium",
 519      "supports_attachments": false,
 520      "options": {}
 521    },
 522    {
 523      "id": "zai/glm-5",
 524      "name": "GLM 5",
 525      "cost_per_1m_in": 1,
 526      "cost_per_1m_out": 3.2,
 527      "cost_per_1m_in_cached": 0.2,
 528      "cost_per_1m_out_cached": 0,
 529      "context_window": 202800,
 530      "default_max_tokens": 8000,
 531      "can_reason": true,
 532      "reasoning_levels": [
 533        "low",
 534        "medium",
 535        "high"
 536      ],
 537      "default_reasoning_effort": "medium",
 538      "supports_attachments": false,
 539      "options": {}
 540    },
 541    {
 542      "id": "zai/glm-4.5",
 543      "name": "GLM-4.5",
 544      "cost_per_1m_in": 0.6,
 545      "cost_per_1m_out": 2.2,
 546      "cost_per_1m_in_cached": 0.11,
 547      "cost_per_1m_out_cached": 0,
 548      "context_window": 128000,
 549      "default_max_tokens": 8000,
 550      "can_reason": true,
 551      "reasoning_levels": [
 552        "low",
 553        "medium",
 554        "high"
 555      ],
 556      "default_reasoning_effort": "medium",
 557      "supports_attachments": false,
 558      "options": {}
 559    },
 560    {
 561      "id": "zai/glm-4.6v",
 562      "name": "GLM-4.6V",
 563      "cost_per_1m_in": 0.3,
 564      "cost_per_1m_out": 0.9,
 565      "cost_per_1m_in_cached": 0.05,
 566      "cost_per_1m_out_cached": 0,
 567      "context_window": 128000,
 568      "default_max_tokens": 8000,
 569      "can_reason": true,
 570      "reasoning_levels": [
 571        "low",
 572        "medium",
 573        "high"
 574      ],
 575      "default_reasoning_effort": "medium",
 576      "supports_attachments": true,
 577      "options": {}
 578    },
 579    {
 580      "id": "zai/glm-4.6v-flash",
 581      "name": "GLM-4.6V-Flash",
 582      "cost_per_1m_in": 0,
 583      "cost_per_1m_out": 0,
 584      "cost_per_1m_in_cached": 0,
 585      "cost_per_1m_out_cached": 0,
 586      "context_window": 128000,
 587      "default_max_tokens": 8000,
 588      "can_reason": true,
 589      "reasoning_levels": [
 590        "low",
 591        "medium",
 592        "high"
 593      ],
 594      "default_reasoning_effort": "medium",
 595      "supports_attachments": true,
 596      "options": {}
 597    },
 598    {
 599      "id": "openai/gpt-5-chat",
 600      "name": "GPT 5 Chat",
 601      "cost_per_1m_in": 1.25,
 602      "cost_per_1m_out": 10,
 603      "cost_per_1m_in_cached": 0.125,
 604      "cost_per_1m_out_cached": 0,
 605      "context_window": 128000,
 606      "default_max_tokens": 8000,
 607      "can_reason": true,
 608      "reasoning_levels": [
 609        "low",
 610        "medium",
 611        "high"
 612      ],
 613      "default_reasoning_effort": "medium",
 614      "supports_attachments": true,
 615      "options": {}
 616    },
 617    {
 618      "id": "openai/gpt-5.1-codex-max",
 619      "name": "GPT 5.1 Codex Max",
 620      "cost_per_1m_in": 1.25,
 621      "cost_per_1m_out": 10,
 622      "cost_per_1m_in_cached": 0.125,
 623      "cost_per_1m_out_cached": 0,
 624      "context_window": 400000,
 625      "default_max_tokens": 8000,
 626      "can_reason": true,
 627      "reasoning_levels": [
 628        "low",
 629        "medium",
 630        "high"
 631      ],
 632      "default_reasoning_effort": "medium",
 633      "supports_attachments": true,
 634      "options": {}
 635    },
 636    {
 637      "id": "openai/gpt-5.1-codex-mini",
 638      "name": "GPT 5.1 Codex Mini",
 639      "cost_per_1m_in": 0.25,
 640      "cost_per_1m_out": 2,
 641      "cost_per_1m_in_cached": 0.025,
 642      "cost_per_1m_out_cached": 0,
 643      "context_window": 400000,
 644      "default_max_tokens": 8000,
 645      "can_reason": true,
 646      "reasoning_levels": [
 647        "low",
 648        "medium",
 649        "high"
 650      ],
 651      "default_reasoning_effort": "medium",
 652      "supports_attachments": true,
 653      "options": {}
 654    },
 655    {
 656      "id": "openai/gpt-5.1-thinking",
 657      "name": "GPT 5.1 Thinking",
 658      "cost_per_1m_in": 1.25,
 659      "cost_per_1m_out": 10,
 660      "cost_per_1m_in_cached": 0.125,
 661      "cost_per_1m_out_cached": 0,
 662      "context_window": 400000,
 663      "default_max_tokens": 8000,
 664      "can_reason": true,
 665      "reasoning_levels": [
 666        "low",
 667        "medium",
 668        "high"
 669      ],
 670      "default_reasoning_effort": "medium",
 671      "supports_attachments": true,
 672      "options": {}
 673    },
 674    {
 675      "id": "openai/gpt-5.2",
 676      "name": "GPT 5.2",
 677      "cost_per_1m_in": 1.75,
 678      "cost_per_1m_out": 14,
 679      "cost_per_1m_in_cached": 0.175,
 680      "cost_per_1m_out_cached": 0,
 681      "context_window": 400000,
 682      "default_max_tokens": 8000,
 683      "can_reason": true,
 684      "reasoning_levels": [
 685        "low",
 686        "medium",
 687        "high"
 688      ],
 689      "default_reasoning_effort": "medium",
 690      "supports_attachments": true,
 691      "options": {}
 692    },
 693    {
 694      "id": "openai/gpt-5.2-pro",
  695      "name": "GPT 5.2 Pro",
 696      "cost_per_1m_in": 21,
 697      "cost_per_1m_out": 168,
 698      "cost_per_1m_in_cached": 0,
 699      "cost_per_1m_out_cached": 0,
 700      "context_window": 400000,
 701      "default_max_tokens": 8000,
 702      "can_reason": true,
 703      "reasoning_levels": [
 704        "low",
 705        "medium",
 706        "high"
 707      ],
 708      "default_reasoning_effort": "medium",
 709      "supports_attachments": true,
 710      "options": {}
 711    },
 712    {
 713      "id": "openai/gpt-5.2-chat",
 714      "name": "GPT 5.2 Chat",
 715      "cost_per_1m_in": 1.75,
 716      "cost_per_1m_out": 14,
 717      "cost_per_1m_in_cached": 0.175,
 718      "cost_per_1m_out_cached": 0,
 719      "context_window": 128000,
 720      "default_max_tokens": 8000,
 721      "can_reason": true,
 722      "reasoning_levels": [
 723        "low",
 724        "medium",
 725        "high"
 726      ],
 727      "default_reasoning_effort": "medium",
 728      "supports_attachments": true,
 729      "options": {}
 730    },
 731    {
 732      "id": "openai/gpt-5.2-codex",
 733      "name": "GPT 5.2 Codex",
 734      "cost_per_1m_in": 1.75,
 735      "cost_per_1m_out": 14,
 736      "cost_per_1m_in_cached": 0.175,
 737      "cost_per_1m_out_cached": 0,
 738      "context_window": 400000,
 739      "default_max_tokens": 8000,
 740      "can_reason": true,
 741      "reasoning_levels": [
 742        "low",
 743        "medium",
 744        "high"
 745      ],
 746      "default_reasoning_effort": "medium",
 747      "supports_attachments": true,
 748      "options": {}
 749    },
 750    {
 751      "id": "openai/gpt-5.3-codex",
 752      "name": "GPT 5.3 Codex",
 753      "cost_per_1m_in": 1.75,
 754      "cost_per_1m_out": 14,
 755      "cost_per_1m_in_cached": 0.175,
 756      "cost_per_1m_out_cached": 0,
 757      "context_window": 400000,
 758      "default_max_tokens": 8000,
 759      "can_reason": true,
 760      "reasoning_levels": [
 761        "low",
 762        "medium",
 763        "high"
 764      ],
 765      "default_reasoning_effort": "medium",
 766      "supports_attachments": true,
 767      "options": {}
 768    },
 769    {
 770      "id": "openai/gpt-5.4",
 771      "name": "GPT 5.4",
 772      "cost_per_1m_in": 2.5,
 773      "cost_per_1m_out": 15,
 774      "cost_per_1m_in_cached": 0.25,
 775      "cost_per_1m_out_cached": 0,
 776      "context_window": 1050000,
 777      "default_max_tokens": 8000,
 778      "can_reason": true,
 779      "reasoning_levels": [
 780        "low",
 781        "medium",
 782        "high"
 783      ],
 784      "default_reasoning_effort": "medium",
 785      "supports_attachments": true,
 786      "options": {}
 787    },
 788    {
 789      "id": "openai/gpt-5.4-pro",
 790      "name": "GPT 5.4 Pro",
 791      "cost_per_1m_in": 30,
 792      "cost_per_1m_out": 180,
 793      "cost_per_1m_in_cached": 0,
 794      "cost_per_1m_out_cached": 0,
 795      "context_window": 1050000,
 796      "default_max_tokens": 8000,
 797      "can_reason": true,
 798      "reasoning_levels": [
 799        "low",
 800        "medium",
 801        "high"
 802      ],
 803      "default_reasoning_effort": "medium",
 804      "supports_attachments": true,
 805      "options": {}
 806    },
 807    {
 808      "id": "openai/gpt-4-turbo",
 809      "name": "GPT-4 Turbo",
 810      "cost_per_1m_in": 10,
 811      "cost_per_1m_out": 30,
 812      "cost_per_1m_in_cached": 0,
 813      "cost_per_1m_out_cached": 0,
 814      "context_window": 128000,
 815      "default_max_tokens": 4096,
 816      "can_reason": false,
 817      "supports_attachments": true,
 818      "options": {}
 819    },
 820    {
 821      "id": "openai/gpt-4.1",
 822      "name": "GPT-4.1",
 823      "cost_per_1m_in": 2,
 824      "cost_per_1m_out": 8,
 825      "cost_per_1m_in_cached": 0.5,
 826      "cost_per_1m_out_cached": 0,
 827      "context_window": 1047576,
 828      "default_max_tokens": 8000,
 829      "can_reason": false,
 830      "supports_attachments": true,
 831      "options": {}
 832    },
 833    {
 834      "id": "openai/gpt-4.1-mini",
 835      "name": "GPT-4.1 mini",
 836      "cost_per_1m_in": 0.4,
 837      "cost_per_1m_out": 1.6,
 838      "cost_per_1m_in_cached": 0.1,
 839      "cost_per_1m_out_cached": 0,
 840      "context_window": 1047576,
 841      "default_max_tokens": 8000,
 842      "can_reason": false,
 843      "supports_attachments": true,
 844      "options": {}
 845    },
 846    {
 847      "id": "openai/gpt-4.1-nano",
 848      "name": "GPT-4.1 nano",
 849      "cost_per_1m_in": 0.1,
 850      "cost_per_1m_out": 0.4,
 851      "cost_per_1m_in_cached": 0.025,
 852      "cost_per_1m_out_cached": 0,
 853      "context_window": 1047576,
 854      "default_max_tokens": 8000,
 855      "can_reason": false,
 856      "supports_attachments": true,
 857      "options": {}
 858    },
 859    {
 860      "id": "openai/gpt-4o",
 861      "name": "GPT-4o",
 862      "cost_per_1m_in": 2.5,
 863      "cost_per_1m_out": 10,
 864      "cost_per_1m_in_cached": 1.25,
 865      "cost_per_1m_out_cached": 0,
 866      "context_window": 128000,
 867      "default_max_tokens": 8000,
 868      "can_reason": false,
 869      "supports_attachments": true,
 870      "options": {}
 871    },
 872    {
 873      "id": "openai/gpt-4o-mini",
 874      "name": "GPT-4o mini",
 875      "cost_per_1m_in": 0.15,
 876      "cost_per_1m_out": 0.6,
 877      "cost_per_1m_in_cached": 0.075,
 878      "cost_per_1m_out_cached": 0,
 879      "context_window": 128000,
 880      "default_max_tokens": 8000,
 881      "can_reason": false,
 882      "supports_attachments": true,
 883      "options": {}
 884    },
 885    {
 886      "id": "openai/gpt-5",
 887      "name": "GPT-5",
 888      "cost_per_1m_in": 1.25,
 889      "cost_per_1m_out": 10,
 890      "cost_per_1m_in_cached": 0.125,
 891      "cost_per_1m_out_cached": 0,
 892      "context_window": 400000,
 893      "default_max_tokens": 8000,
 894      "can_reason": true,
 895      "reasoning_levels": [
 896        "low",
 897        "medium",
 898        "high"
 899      ],
 900      "default_reasoning_effort": "medium",
 901      "supports_attachments": true,
 902      "options": {}
 903    },
 904    {
 905      "id": "openai/gpt-5-mini",
 906      "name": "GPT-5 mini",
 907      "cost_per_1m_in": 0.25,
 908      "cost_per_1m_out": 2,
 909      "cost_per_1m_in_cached": 0.025,
 910      "cost_per_1m_out_cached": 0,
 911      "context_window": 400000,
 912      "default_max_tokens": 8000,
 913      "can_reason": true,
 914      "reasoning_levels": [
 915        "low",
 916        "medium",
 917        "high"
 918      ],
 919      "default_reasoning_effort": "medium",
 920      "supports_attachments": true,
 921      "options": {}
 922    },
 923    {
 924      "id": "openai/gpt-5-nano",
 925      "name": "GPT-5 nano",
 926      "cost_per_1m_in": 0.05,
 927      "cost_per_1m_out": 0.4,
 928      "cost_per_1m_in_cached": 0.005,
 929      "cost_per_1m_out_cached": 0,
 930      "context_window": 400000,
 931      "default_max_tokens": 8000,
 932      "can_reason": true,
 933      "reasoning_levels": [
 934        "low",
 935        "medium",
 936        "high"
 937      ],
 938      "default_reasoning_effort": "medium",
 939      "supports_attachments": true,
 940      "options": {}
 941    },
 942    {
 943      "id": "openai/gpt-5-pro",
 944      "name": "GPT-5 pro",
 945      "cost_per_1m_in": 15,
 946      "cost_per_1m_out": 120,
 947      "cost_per_1m_in_cached": 0,
 948      "cost_per_1m_out_cached": 0,
 949      "context_window": 400000,
 950      "default_max_tokens": 8000,
 951      "can_reason": true,
 952      "reasoning_levels": [
 953        "low",
 954        "medium",
 955        "high"
 956      ],
 957      "default_reasoning_effort": "medium",
 958      "supports_attachments": true,
 959      "options": {}
 960    },
 961    {
 962      "id": "openai/gpt-5-codex",
 963      "name": "GPT-5-Codex",
 964      "cost_per_1m_in": 1.25,
 965      "cost_per_1m_out": 10,
 966      "cost_per_1m_in_cached": 0.125,
 967      "cost_per_1m_out_cached": 0,
 968      "context_window": 400000,
 969      "default_max_tokens": 8000,
 970      "can_reason": true,
 971      "reasoning_levels": [
 972        "low",
 973        "medium",
 974        "high"
 975      ],
 976      "default_reasoning_effort": "medium",
 977      "supports_attachments": false,
 978      "options": {}
 979    },
 980    {
 981      "id": "openai/gpt-5.1-instant",
 982      "name": "GPT-5.1 Instant",
 983      "cost_per_1m_in": 1.25,
 984      "cost_per_1m_out": 10,
 985      "cost_per_1m_in_cached": 0.125,
 986      "cost_per_1m_out_cached": 0,
 987      "context_window": 128000,
 988      "default_max_tokens": 8000,
 989      "can_reason": true,
 990      "reasoning_levels": [
 991        "low",
 992        "medium",
 993        "high"
 994      ],
 995      "default_reasoning_effort": "medium",
 996      "supports_attachments": true,
 997      "options": {}
 998    },
 999    {
1000      "id": "openai/gpt-5.1-codex",
1001      "name": "GPT-5.1-Codex",
1002      "cost_per_1m_in": 1.25,
1003      "cost_per_1m_out": 10,
1004      "cost_per_1m_in_cached": 0.125,
1005      "cost_per_1m_out_cached": 0,
1006      "context_window": 400000,
1007      "default_max_tokens": 8000,
1008      "can_reason": true,
1009      "reasoning_levels": [
1010        "low",
1011        "medium",
1012        "high"
1013      ],
1014      "default_reasoning_effort": "medium",
1015      "supports_attachments": true,
1016      "options": {}
1017    },
1018    {
1019      "id": "openai/gpt-5.3-chat",
1020      "name": "GPT-5.3 Chat",
1021      "cost_per_1m_in": 1.75,
1022      "cost_per_1m_out": 14,
1023      "cost_per_1m_in_cached": 0.175,
1024      "cost_per_1m_out_cached": 0,
1025      "context_window": 128000,
1026      "default_max_tokens": 8000,
1027      "can_reason": true,
1028      "reasoning_levels": [
1029        "low",
1030        "medium",
1031        "high"
1032      ],
1033      "default_reasoning_effort": "medium",
1034      "supports_attachments": true,
1035      "options": {}
1036    },
1037    {
1038      "id": "google/gemini-2.0-flash",
1039      "name": "Gemini 2.0 Flash",
1040      "cost_per_1m_in": 0.15,
1041      "cost_per_1m_out": 0.6,
1042      "cost_per_1m_in_cached": 0,
1043      "cost_per_1m_out_cached": 0,
1044      "context_window": 1048576,
1045      "default_max_tokens": 8000,
1046      "can_reason": false,
1047      "supports_attachments": true,
1048      "options": {}
1049    },
1050    {
1051      "id": "google/gemini-2.0-flash-lite",
1052      "name": "Gemini 2.0 Flash Lite",
1053      "cost_per_1m_in": 0.075,
1054      "cost_per_1m_out": 0.3,
1055      "cost_per_1m_in_cached": 0,
1056      "cost_per_1m_out_cached": 0,
1057      "context_window": 1048576,
1058      "default_max_tokens": 8000,
1059      "can_reason": false,
1060      "supports_attachments": true,
1061      "options": {}
1062    },
1063    {
1064      "id": "google/gemini-2.5-flash",
1065      "name": "Gemini 2.5 Flash",
1066      "cost_per_1m_in": 0.3,
1067      "cost_per_1m_out": 2.5,
1068      "cost_per_1m_in_cached": 0.03,
1069      "cost_per_1m_out_cached": 0,
1070      "context_window": 1000000,
1071      "default_max_tokens": 8000,
1072      "can_reason": true,
1073      "reasoning_levels": [
1074        "low",
1075        "medium",
1076        "high"
1077      ],
1078      "default_reasoning_effort": "medium",
1079      "supports_attachments": true,
1080      "options": {}
1081    },
1082    {
1083      "id": "google/gemini-2.5-flash-lite",
1084      "name": "Gemini 2.5 Flash Lite",
1085      "cost_per_1m_in": 0.1,
1086      "cost_per_1m_out": 0.4,
1087      "cost_per_1m_in_cached": 0.01,
1088      "cost_per_1m_out_cached": 0,
1089      "context_window": 1048576,
1090      "default_max_tokens": 8000,
1091      "can_reason": true,
1092      "reasoning_levels": [
1093        "low",
1094        "medium",
1095        "high"
1096      ],
1097      "default_reasoning_effort": "medium",
1098      "supports_attachments": true,
1099      "options": {}
1100    },
1101    {
1102      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
1103      "name": "Gemini 2.5 Flash Lite Preview 09-2025",
1104      "cost_per_1m_in": 0.1,
1105      "cost_per_1m_out": 0.4,
1106      "cost_per_1m_in_cached": 0.01,
1107      "cost_per_1m_out_cached": 0,
1108      "context_window": 1048576,
1109      "default_max_tokens": 8000,
1110      "can_reason": true,
1111      "reasoning_levels": [
1112        "low",
1113        "medium",
1114        "high"
1115      ],
1116      "default_reasoning_effort": "medium",
1117      "supports_attachments": true,
1118      "options": {}
1119    },
1120    {
1121      "id": "google/gemini-2.5-flash-preview-09-2025",
1122      "name": "Gemini 2.5 Flash Preview 09-2025",
1123      "cost_per_1m_in": 0.3,
1124      "cost_per_1m_out": 2.5,
1125      "cost_per_1m_in_cached": 0.03,
1126      "cost_per_1m_out_cached": 0,
1127      "context_window": 1000000,
1128      "default_max_tokens": 8000,
1129      "can_reason": true,
1130      "reasoning_levels": [
1131        "low",
1132        "medium",
1133        "high"
1134      ],
1135      "default_reasoning_effort": "medium",
1136      "supports_attachments": true,
1137      "options": {}
1138    },
1139    {
1140      "id": "google/gemini-2.5-pro",
1141      "name": "Gemini 2.5 Pro",
1142      "cost_per_1m_in": 1.25,
1143      "cost_per_1m_out": 10,
1144      "cost_per_1m_in_cached": 0.125,
1145      "cost_per_1m_out_cached": 0,
1146      "context_window": 1048576,
1147      "default_max_tokens": 8000,
1148      "can_reason": true,
1149      "reasoning_levels": [
1150        "low",
1151        "medium",
1152        "high"
1153      ],
1154      "default_reasoning_effort": "medium",
1155      "supports_attachments": true,
1156      "options": {}
1157    },
1158    {
1159      "id": "google/gemini-3-flash",
1160      "name": "Gemini 3 Flash",
1161      "cost_per_1m_in": 0.5,
1162      "cost_per_1m_out": 3,
1163      "cost_per_1m_in_cached": 0.05,
1164      "cost_per_1m_out_cached": 0,
1165      "context_window": 1000000,
1166      "default_max_tokens": 8000,
1167      "can_reason": true,
1168      "reasoning_levels": [
1169        "low",
1170        "medium",
1171        "high"
1172      ],
1173      "default_reasoning_effort": "medium",
1174      "supports_attachments": true,
1175      "options": {}
1176    },
1177    {
1178      "id": "google/gemini-3-pro-preview",
1179      "name": "Gemini 3 Pro Preview",
1180      "cost_per_1m_in": 2,
1181      "cost_per_1m_out": 12,
1182      "cost_per_1m_in_cached": 0.2,
1183      "cost_per_1m_out_cached": 0,
1184      "context_window": 1000000,
1185      "default_max_tokens": 8000,
1186      "can_reason": true,
1187      "reasoning_levels": [
1188        "low",
1189        "medium",
1190        "high"
1191      ],
1192      "default_reasoning_effort": "medium",
1193      "supports_attachments": true,
1194      "options": {}
1195    },
1196    {
1197      "id": "google/gemini-3.1-flash-lite-preview",
1198      "name": "Gemini 3.1 Flash Lite Preview",
1199      "cost_per_1m_in": 0.25,
1200      "cost_per_1m_out": 1.5,
1201      "cost_per_1m_in_cached": 0,
1202      "cost_per_1m_out_cached": 0,
1203      "context_window": 1000000,
1204      "default_max_tokens": 8000,
1205      "can_reason": true,
1206      "reasoning_levels": [
1207        "low",
1208        "medium",
1209        "high"
1210      ],
1211      "default_reasoning_effort": "medium",
1212      "supports_attachments": true,
1213      "options": {}
1214    },
1215    {
1216      "id": "google/gemini-3.1-pro-preview",
1217      "name": "Gemini 3.1 Pro Preview",
1218      "cost_per_1m_in": 2,
1219      "cost_per_1m_out": 12,
1220      "cost_per_1m_in_cached": 0.2,
1221      "cost_per_1m_out_cached": 0,
1222      "context_window": 1000000,
1223      "default_max_tokens": 8000,
1224      "can_reason": true,
1225      "reasoning_levels": [
1226        "low",
1227        "medium",
1228        "high"
1229      ],
1230      "default_reasoning_effort": "medium",
1231      "supports_attachments": true,
1232      "options": {}
1233    },
1234    {
1235      "id": "xai/grok-2-vision",
1236      "name": "Grok 2 Vision",
1237      "cost_per_1m_in": 2,
1238      "cost_per_1m_out": 10,
1239      "cost_per_1m_in_cached": 0,
1240      "cost_per_1m_out_cached": 0,
1241      "context_window": 32768,
1242      "default_max_tokens": 8000,
1243      "can_reason": false,
1244      "supports_attachments": true,
1245      "options": {}
1246    },
1247    {
1248      "id": "xai/grok-3",
1249      "name": "Grok 3 Beta",
1250      "cost_per_1m_in": 3,
1251      "cost_per_1m_out": 15,
1252      "cost_per_1m_in_cached": 0,
1253      "cost_per_1m_out_cached": 0,
1254      "context_window": 131072,
1255      "default_max_tokens": 8000,
1256      "can_reason": false,
1257      "supports_attachments": false,
1258      "options": {}
1259    },
1260    {
1261      "id": "xai/grok-3-fast",
1262      "name": "Grok 3 Fast Beta",
1263      "cost_per_1m_in": 5,
1264      "cost_per_1m_out": 25,
1265      "cost_per_1m_in_cached": 0,
1266      "cost_per_1m_out_cached": 0,
1267      "context_window": 131072,
1268      "default_max_tokens": 8000,
1269      "can_reason": false,
1270      "supports_attachments": false,
1271      "options": {}
1272    },
1273    {
1274      "id": "xai/grok-3-mini",
1275      "name": "Grok 3 Mini Beta",
1276      "cost_per_1m_in": 0.3,
1277      "cost_per_1m_out": 0.5,
1278      "cost_per_1m_in_cached": 0,
1279      "cost_per_1m_out_cached": 0,
1280      "context_window": 131072,
1281      "default_max_tokens": 8000,
1282      "can_reason": false,
1283      "supports_attachments": false,
1284      "options": {}
1285    },
1286    {
1287      "id": "xai/grok-3-mini-fast",
1288      "name": "Grok 3 Mini Fast Beta",
1289      "cost_per_1m_in": 0.6,
1290      "cost_per_1m_out": 4,
1291      "cost_per_1m_in_cached": 0,
1292      "cost_per_1m_out_cached": 0,
1293      "context_window": 131072,
1294      "default_max_tokens": 8000,
1295      "can_reason": false,
1296      "supports_attachments": false,
1297      "options": {}
1298    },
1299    {
1300      "id": "xai/grok-4",
1301      "name": "Grok 4",
1302      "cost_per_1m_in": 3,
1303      "cost_per_1m_out": 15,
1304      "cost_per_1m_in_cached": 0,
1305      "cost_per_1m_out_cached": 0,
1306      "context_window": 256000,
1307      "default_max_tokens": 8000,
1308      "can_reason": true,
1309      "reasoning_levels": [
1310        "low",
1311        "medium",
1312        "high"
1313      ],
1314      "default_reasoning_effort": "medium",
1315      "supports_attachments": true,
1316      "options": {}
1317    },
1318    {
1319      "id": "xai/grok-4-fast-non-reasoning",
1320      "name": "Grok 4 Fast Non-Reasoning",
1321      "cost_per_1m_in": 0.2,
1322      "cost_per_1m_out": 0.5,
1323      "cost_per_1m_in_cached": 0.05,
1324      "cost_per_1m_out_cached": 0,
1325      "context_window": 2000000,
1326      "default_max_tokens": 8000,
1327      "can_reason": false,
1328      "supports_attachments": false,
1329      "options": {}
1330    },
1331    {
1332      "id": "xai/grok-4-fast-reasoning",
1333      "name": "Grok 4 Fast Reasoning",
1334      "cost_per_1m_in": 0.2,
1335      "cost_per_1m_out": 0.5,
1336      "cost_per_1m_in_cached": 0.05,
1337      "cost_per_1m_out_cached": 0,
1338      "context_window": 2000000,
1339      "default_max_tokens": 8000,
1340      "can_reason": true,
1341      "reasoning_levels": [
1342        "low",
1343        "medium",
1344        "high"
1345      ],
1346      "default_reasoning_effort": "medium",
1347      "supports_attachments": false,
1348      "options": {}
1349    },
1350    {
1351      "id": "xai/grok-4.1-fast-non-reasoning",
1352      "name": "Grok 4.1 Fast Non-Reasoning",
1353      "cost_per_1m_in": 0.2,
1354      "cost_per_1m_out": 0.5,
1355      "cost_per_1m_in_cached": 0.05,
1356      "cost_per_1m_out_cached": 0,
1357      "context_window": 2000000,
1358      "default_max_tokens": 8000,
1359      "can_reason": false,
1360      "supports_attachments": false,
1361      "options": {}
1362    },
1363    {
1364      "id": "xai/grok-4.1-fast-reasoning",
1365      "name": "Grok 4.1 Fast Reasoning",
1366      "cost_per_1m_in": 0.2,
1367      "cost_per_1m_out": 0.5,
1368      "cost_per_1m_in_cached": 0.05,
1369      "cost_per_1m_out_cached": 0,
1370      "context_window": 2000000,
1371      "default_max_tokens": 8000,
1372      "can_reason": true,
1373      "reasoning_levels": [
1374        "low",
1375        "medium",
1376        "high"
1377      ],
1378      "default_reasoning_effort": "medium",
1379      "supports_attachments": false,
1380      "options": {}
1381    },
1382    {
1383      "id": "xai/grok-code-fast-1",
1384      "name": "Grok Code Fast 1",
1385      "cost_per_1m_in": 0.2,
1386      "cost_per_1m_out": 1.5,
1387      "cost_per_1m_in_cached": 0.02,
1388      "cost_per_1m_out_cached": 0,
1389      "context_window": 256000,
1390      "default_max_tokens": 8000,
1391      "can_reason": true,
1392      "reasoning_levels": [
1393        "low",
1394        "medium",
1395        "high"
1396      ],
1397      "default_reasoning_effort": "medium",
1398      "supports_attachments": false,
1399      "options": {}
1400    },
1401    {
1402      "id": "prime-intellect/intellect-3",
1403      "name": "INTELLECT 3",
1404      "cost_per_1m_in": 0.2,
1405      "cost_per_1m_out": 1.1,
1406      "cost_per_1m_in_cached": 0,
1407      "cost_per_1m_out_cached": 0,
1408      "context_window": 131072,
1409      "default_max_tokens": 8000,
1410      "can_reason": true,
1411      "reasoning_levels": [
1412        "low",
1413        "medium",
1414        "high"
1415      ],
1416      "default_reasoning_effort": "medium",
1417      "supports_attachments": false,
1418      "options": {}
1419    },
1420    {
1421      "id": "moonshotai/kimi-k2",
1422      "name": "Kimi K2",
1423      "cost_per_1m_in": 0.6,
1424      "cost_per_1m_out": 2.5,
1425      "cost_per_1m_in_cached": 0,
1426      "cost_per_1m_out_cached": 0,
1427      "context_window": 131072,
1428      "default_max_tokens": 8000,
1429      "can_reason": false,
1430      "supports_attachments": false,
1431      "options": {}
1432    },
1433    {
1434      "id": "moonshotai/kimi-k2-0905",
1435      "name": "Kimi K2 0905",
1436      "cost_per_1m_in": 0.6,
1437      "cost_per_1m_out": 2.5,
1438      "cost_per_1m_in_cached": 0,
1439      "cost_per_1m_out_cached": 0,
1440      "context_window": 256000,
1441      "default_max_tokens": 8000,
1442      "can_reason": false,
1443      "supports_attachments": false,
1444      "options": {}
1445    },
1446    {
1447      "id": "moonshotai/kimi-k2-thinking",
1448      "name": "Kimi K2 Thinking",
1449      "cost_per_1m_in": 0.6,
1450      "cost_per_1m_out": 2.5,
1451      "cost_per_1m_in_cached": 0.15,
1452      "cost_per_1m_out_cached": 0,
1453      "context_window": 262114,
1454      "default_max_tokens": 8000,
1455      "can_reason": true,
1456      "reasoning_levels": [
1457        "low",
1458        "medium",
1459        "high"
1460      ],
1461      "default_reasoning_effort": "medium",
1462      "supports_attachments": false,
1463      "options": {}
1464    },
1465    {
1466      "id": "moonshotai/kimi-k2-thinking-turbo",
1467      "name": "Kimi K2 Thinking Turbo",
1468      "cost_per_1m_in": 1.15,
1469      "cost_per_1m_out": 8,
1470      "cost_per_1m_in_cached": 0.15,
1471      "cost_per_1m_out_cached": 0,
1472      "context_window": 262114,
1473      "default_max_tokens": 8000,
1474      "can_reason": true,
1475      "reasoning_levels": [
1476        "low",
1477        "medium",
1478        "high"
1479      ],
1480      "default_reasoning_effort": "medium",
1481      "supports_attachments": false,
1482      "options": {}
1483    },
1484    {
1485      "id": "moonshotai/kimi-k2-turbo",
1486      "name": "Kimi K2 Turbo",
1487      "cost_per_1m_in": 2.4,
1488      "cost_per_1m_out": 10,
1489      "cost_per_1m_in_cached": 0,
1490      "cost_per_1m_out_cached": 0,
1491      "context_window": 256000,
1492      "default_max_tokens": 8000,
1493      "can_reason": false,
1494      "supports_attachments": false,
1495      "options": {}
1496    },
1497    {
1498      "id": "moonshotai/kimi-k2.5",
1499      "name": "Kimi K2.5",
1500      "cost_per_1m_in": 0.6,
1501      "cost_per_1m_out": 3,
1502      "cost_per_1m_in_cached": 0.1,
1503      "cost_per_1m_out_cached": 0,
1504      "context_window": 262114,
1505      "default_max_tokens": 8000,
1506      "can_reason": true,
1507      "reasoning_levels": [
1508        "low",
1509        "medium",
1510        "high"
1511      ],
1512      "default_reasoning_effort": "medium",
1513      "supports_attachments": true,
1514      "options": {}
1515    },
1516    {
1517      "id": "meta/llama-3.1-70b",
1518      "name": "Llama 3.1 70B Instruct",
1519      "cost_per_1m_in": 0.72,
1520      "cost_per_1m_out": 0.72,
1521      "cost_per_1m_in_cached": 0,
1522      "cost_per_1m_out_cached": 0,
1523      "context_window": 128000,
1524      "default_max_tokens": 8000,
1525      "can_reason": false,
1526      "supports_attachments": false,
1527      "options": {}
1528    },
1529    {
1530      "id": "meta/llama-3.1-8b",
1531      "name": "Llama 3.1 8B Instruct",
1532      "cost_per_1m_in": 0.1,
1533      "cost_per_1m_out": 0.1,
1534      "cost_per_1m_in_cached": 0,
1535      "cost_per_1m_out_cached": 0,
1536      "context_window": 128000,
1537      "default_max_tokens": 8000,
1538      "can_reason": false,
1539      "supports_attachments": false,
1540      "options": {}
1541    },
1542    {
1543      "id": "meta/llama-3.2-11b",
1544      "name": "Llama 3.2 11B Vision Instruct",
1545      "cost_per_1m_in": 0.16,
1546      "cost_per_1m_out": 0.16,
1547      "cost_per_1m_in_cached": 0,
1548      "cost_per_1m_out_cached": 0,
1549      "context_window": 128000,
1550      "default_max_tokens": 8000,
1551      "can_reason": false,
1552      "supports_attachments": true,
1553      "options": {}
1554    },
1555    {
1556      "id": "meta/llama-3.2-90b",
1557      "name": "Llama 3.2 90B Vision Instruct",
1558      "cost_per_1m_in": 0.72,
1559      "cost_per_1m_out": 0.72,
1560      "cost_per_1m_in_cached": 0,
1561      "cost_per_1m_out_cached": 0,
1562      "context_window": 128000,
1563      "default_max_tokens": 8000,
1564      "can_reason": false,
1565      "supports_attachments": true,
1566      "options": {}
1567    },
1568    {
1569      "id": "meta/llama-3.3-70b",
1570      "name": "Llama 3.3 70B Instruct",
1571      "cost_per_1m_in": 0.72,
1572      "cost_per_1m_out": 0.72,
1573      "cost_per_1m_in_cached": 0,
1574      "cost_per_1m_out_cached": 0,
1575      "context_window": 128000,
1576      "default_max_tokens": 8000,
1577      "can_reason": false,
1578      "supports_attachments": false,
1579      "options": {}
1580    },
1581    {
1582      "id": "meta/llama-4-maverick",
1583      "name": "Llama 4 Maverick 17B Instruct",
1584      "cost_per_1m_in": 0.24,
1585      "cost_per_1m_out": 0.97,
1586      "cost_per_1m_in_cached": 0,
1587      "cost_per_1m_out_cached": 0,
1588      "context_window": 128000,
1589      "default_max_tokens": 8000,
1590      "can_reason": false,
1591      "supports_attachments": true,
1592      "options": {}
1593    },
1594    {
1595      "id": "meta/llama-4-scout",
1596      "name": "Llama 4 Scout 17B Instruct",
1597      "cost_per_1m_in": 0.17,
1598      "cost_per_1m_out": 0.66,
1599      "cost_per_1m_in_cached": 0,
1600      "cost_per_1m_out_cached": 0,
1601      "context_window": 128000,
1602      "default_max_tokens": 8000,
1603      "can_reason": false,
1604      "supports_attachments": true,
1605      "options": {}
1606    },
1607    {
1608      "id": "meituan/longcat-flash-chat",
1609      "name": "LongCat Flash Chat",
1610      "cost_per_1m_in": 0,
1611      "cost_per_1m_out": 0,
1612      "cost_per_1m_in_cached": 0,
1613      "cost_per_1m_out_cached": 0,
1614      "context_window": 128000,
1615      "default_max_tokens": 8000,
1616      "can_reason": false,
1617      "supports_attachments": false,
1618      "options": {}
1619    },
1620    {
1621      "id": "meituan/longcat-flash-thinking",
1622      "name": "LongCat Flash Thinking",
1623      "cost_per_1m_in": 0.15,
1624      "cost_per_1m_out": 1.5,
1625      "cost_per_1m_in_cached": 0,
1626      "cost_per_1m_out_cached": 0,
1627      "context_window": 128000,
1628      "default_max_tokens": 8000,
1629      "can_reason": true,
1630      "reasoning_levels": [
1631        "low",
1632        "medium",
1633        "high"
1634      ],
1635      "default_reasoning_effort": "medium",
1636      "supports_attachments": false,
1637      "options": {}
1638    },
1639    {
1640      "id": "inception/mercury-2",
1641      "name": "Mercury 2",
1642      "cost_per_1m_in": 0.25,
1643      "cost_per_1m_out": 0.75,
1644      "cost_per_1m_in_cached": 0.025,
1645      "cost_per_1m_out_cached": 0,
1646      "context_window": 128000,
1647      "default_max_tokens": 8000,
1648      "can_reason": true,
1649      "reasoning_levels": [
1650        "low",
1651        "medium",
1652        "high"
1653      ],
1654      "default_reasoning_effort": "medium",
1655      "supports_attachments": false,
1656      "options": {}
1657    },
1658    {
1659      "id": "inception/mercury-coder-small",
1660      "name": "Mercury Coder Small Beta",
1661      "cost_per_1m_in": 0.25,
1662      "cost_per_1m_out": 1,
1663      "cost_per_1m_in_cached": 0,
1664      "cost_per_1m_out_cached": 0,
1665      "context_window": 32000,
1666      "default_max_tokens": 8000,
1667      "can_reason": false,
1668      "supports_attachments": false,
1669      "options": {}
1670    },
1671    {
1672      "id": "xiaomi/mimo-v2-flash",
1673      "name": "MiMo V2 Flash",
1674      "cost_per_1m_in": 0.1,
1675      "cost_per_1m_out": 0.3,
1676      "cost_per_1m_in_cached": 0.02,
1677      "cost_per_1m_out_cached": 0,
1678      "context_window": 262144,
1679      "default_max_tokens": 8000,
1680      "can_reason": true,
1681      "reasoning_levels": [
1682        "low",
1683        "medium",
1684        "high"
1685      ],
1686      "default_reasoning_effort": "medium",
1687      "supports_attachments": false,
1688      "options": {}
1689    },
1690    {
1691      "id": "minimax/minimax-m2",
1692      "name": "MiniMax M2",
1693      "cost_per_1m_in": 0.3,
1694      "cost_per_1m_out": 1.2,
1695      "cost_per_1m_in_cached": 0.03,
1696      "cost_per_1m_out_cached": 0.375,
1697      "context_window": 205000,
1698      "default_max_tokens": 8000,
1699      "can_reason": true,
1700      "reasoning_levels": [
1701        "low",
1702        "medium",
1703        "high"
1704      ],
1705      "default_reasoning_effort": "medium",
1706      "supports_attachments": false,
1707      "options": {}
1708    },
1709    {
1710      "id": "minimax/minimax-m2.1",
1711      "name": "MiniMax M2.1",
1712      "cost_per_1m_in": 0.3,
1713      "cost_per_1m_out": 1.2,
1714      "cost_per_1m_in_cached": 0.03,
1715      "cost_per_1m_out_cached": 0.375,
1716      "context_window": 204800,
1717      "default_max_tokens": 8000,
1718      "can_reason": true,
1719      "reasoning_levels": [
1720        "low",
1721        "medium",
1722        "high"
1723      ],
1724      "default_reasoning_effort": "medium",
1725      "supports_attachments": false,
1726      "options": {}
1727    },
1728    {
1729      "id": "minimax/minimax-m2.1-lightning",
1730      "name": "MiniMax M2.1 Lightning",
1731      "cost_per_1m_in": 0.3,
1732      "cost_per_1m_out": 2.4,
1733      "cost_per_1m_in_cached": 0.03,
1734      "cost_per_1m_out_cached": 0.375,
1735      "context_window": 204800,
1736      "default_max_tokens": 8000,
1737      "can_reason": true,
1738      "reasoning_levels": [
1739        "low",
1740        "medium",
1741        "high"
1742      ],
1743      "default_reasoning_effort": "medium",
1744      "supports_attachments": false,
1745      "options": {}
1746    },
1747    {
1748      "id": "minimax/minimax-m2.5",
1749      "name": "MiniMax M2.5",
1750      "cost_per_1m_in": 0.3,
1751      "cost_per_1m_out": 1.2,
1752      "cost_per_1m_in_cached": 0.03,
1753      "cost_per_1m_out_cached": 0.375,
1754      "context_window": 204800,
1755      "default_max_tokens": 8000,
1756      "can_reason": true,
1757      "reasoning_levels": [
1758        "low",
1759        "medium",
1760        "high"
1761      ],
1762      "default_reasoning_effort": "medium",
1763      "supports_attachments": false,
1764      "options": {}
1765    },
1766    {
1767      "id": "minimax/minimax-m2.5-highspeed",
1768      "name": "MiniMax M2.5 High Speed",
1769      "cost_per_1m_in": 0.6,
1770      "cost_per_1m_out": 2.4,
1771      "cost_per_1m_in_cached": 0.03,
1772      "cost_per_1m_out_cached": 0.375,
1773      "context_window": 0,
1774      "default_max_tokens": 0,
1775      "can_reason": true,
1776      "reasoning_levels": [
1777        "low",
1778        "medium",
1779        "high"
1780      ],
1781      "default_reasoning_effort": "medium",
1782      "supports_attachments": false,
1783      "options": {}
1784    },
1785    {
1786      "id": "mistral/ministral-3b",
1787      "name": "Ministral 3B",
1788      "cost_per_1m_in": 0.04,
1789      "cost_per_1m_out": 0.04,
1790      "cost_per_1m_in_cached": 0,
1791      "cost_per_1m_out_cached": 0,
1792      "context_window": 128000,
1793      "default_max_tokens": 4000,
1794      "can_reason": false,
1795      "supports_attachments": false,
1796      "options": {}
1797    },
1798    {
1799      "id": "mistral/ministral-8b",
1800      "name": "Ministral 8B",
1801      "cost_per_1m_in": 0.1,
1802      "cost_per_1m_out": 0.1,
1803      "cost_per_1m_in_cached": 0,
1804      "cost_per_1m_out_cached": 0,
1805      "context_window": 128000,
1806      "default_max_tokens": 4000,
1807      "can_reason": false,
1808      "supports_attachments": false,
1809      "options": {}
1810    },
1811    {
1812      "id": "mistral/codestral",
1813      "name": "Mistral Codestral",
1814      "cost_per_1m_in": 0.3,
1815      "cost_per_1m_out": 0.9,
1816      "cost_per_1m_in_cached": 0,
1817      "cost_per_1m_out_cached": 0,
1818      "context_window": 128000,
1819      "default_max_tokens": 4000,
1820      "can_reason": false,
1821      "supports_attachments": false,
1822      "options": {}
1823    },
1824    {
1825      "id": "mistral/mistral-medium",
1826      "name": "Mistral Medium 3.1",
1827      "cost_per_1m_in": 0.4,
1828      "cost_per_1m_out": 2,
1829      "cost_per_1m_in_cached": 0,
1830      "cost_per_1m_out_cached": 0,
1831      "context_window": 128000,
1832      "default_max_tokens": 8000,
1833      "can_reason": false,
1834      "supports_attachments": true,
1835      "options": {}
1836    },
1837    {
1838      "id": "mistral/mistral-small",
1839      "name": "Mistral Small",
1840      "cost_per_1m_in": 0.1,
1841      "cost_per_1m_out": 0.3,
1842      "cost_per_1m_in_cached": 0,
1843      "cost_per_1m_out_cached": 0,
1844      "context_window": 32000,
1845      "default_max_tokens": 4000,
1846      "can_reason": false,
1847      "supports_attachments": true,
1848      "options": {}
1849    },
1850    {
1851      "id": "nvidia/nemotron-nano-12b-v2-vl",
1852      "name": "Nvidia Nemotron Nano 12B V2 VL",
1853      "cost_per_1m_in": 0.2,
1854      "cost_per_1m_out": 0.6,
1855      "cost_per_1m_in_cached": 0,
1856      "cost_per_1m_out_cached": 0,
1857      "context_window": 131072,
1858      "default_max_tokens": 8000,
1859      "can_reason": true,
1860      "reasoning_levels": [
1861        "low",
1862        "medium",
1863        "high"
1864      ],
1865      "default_reasoning_effort": "medium",
1866      "supports_attachments": true,
1867      "options": {}
1868    },
1869    {
1870      "id": "nvidia/nemotron-nano-9b-v2",
1871      "name": "Nvidia Nemotron Nano 9B V2",
1872      "cost_per_1m_in": 0.06,
1873      "cost_per_1m_out": 0.23,
1874      "cost_per_1m_in_cached": 0,
1875      "cost_per_1m_out_cached": 0,
1876      "context_window": 131072,
1877      "default_max_tokens": 8000,
1878      "can_reason": true,
1879      "reasoning_levels": [
1880        "low",
1881        "medium",
1882        "high"
1883      ],
1884      "default_reasoning_effort": "medium",
1885      "supports_attachments": false,
1886      "options": {}
1887    },
1888    {
1889      "id": "mistral/pixtral-12b",
1890      "name": "Pixtral 12B 2409",
1891      "cost_per_1m_in": 0.15,
1892      "cost_per_1m_out": 0.15,
1893      "cost_per_1m_in_cached": 0,
1894      "cost_per_1m_out_cached": 0,
1895      "context_window": 128000,
1896      "default_max_tokens": 4000,
1897      "can_reason": false,
1898      "supports_attachments": true,
1899      "options": {}
1900    },
1901    {
1902      "id": "mistral/pixtral-large",
1903      "name": "Pixtral Large",
1904      "cost_per_1m_in": 2,
1905      "cost_per_1m_out": 6,
1906      "cost_per_1m_in_cached": 0,
1907      "cost_per_1m_out_cached": 0,
1908      "context_window": 128000,
1909      "default_max_tokens": 4000,
1910      "can_reason": false,
1911      "supports_attachments": true,
1912      "options": {}
1913    },
1914    {
1915      "id": "alibaba/qwen-3-32b",
1916      "name": "Qwen 3 32B",
1917      "cost_per_1m_in": 0.29,
1918      "cost_per_1m_out": 0.59,
1919      "cost_per_1m_in_cached": 0,
1920      "cost_per_1m_out_cached": 0,
1921      "context_window": 131072,
1922      "default_max_tokens": 8000,
1923      "can_reason": true,
1924      "reasoning_levels": [
1925        "low",
1926        "medium",
1927        "high"
1928      ],
1929      "default_reasoning_effort": "medium",
1930      "supports_attachments": false,
1931      "options": {}
1932    },
1933    {
1934      "id": "alibaba/qwen3-coder-30b-a3b",
1935      "name": "Qwen 3 Coder 30B A3B Instruct",
1936      "cost_per_1m_in": 0.15,
1937      "cost_per_1m_out": 0.6,
1938      "cost_per_1m_in_cached": 0,
1939      "cost_per_1m_out_cached": 0,
1940      "context_window": 262144,
1941      "default_max_tokens": 8000,
1942      "can_reason": true,
1943      "reasoning_levels": [
1944        "low",
1945        "medium",
1946        "high"
1947      ],
1948      "default_reasoning_effort": "medium",
1949      "supports_attachments": false,
1950      "options": {}
1951    },
1952    {
1953      "id": "alibaba/qwen3-max-thinking",
1954      "name": "Qwen 3 Max Thinking",
1955      "cost_per_1m_in": 1.2,
1956      "cost_per_1m_out": 6,
1957      "cost_per_1m_in_cached": 0.24,
1958      "cost_per_1m_out_cached": 0,
1959      "context_window": 256000,
1960      "default_max_tokens": 8000,
1961      "can_reason": true,
1962      "reasoning_levels": [
1963        "low",
1964        "medium",
1965        "high"
1966      ],
1967      "default_reasoning_effort": "medium",
1968      "supports_attachments": false,
1969      "options": {}
1970    },
1971    {
1972      "id": "alibaba/qwen3.5-flash",
1973      "name": "Qwen 3.5 Flash",
1974      "cost_per_1m_in": 0.1,
1975      "cost_per_1m_out": 0.4,
1976      "cost_per_1m_in_cached": 0.001,
1977      "cost_per_1m_out_cached": 0.125,
1978      "context_window": 1000000,
1979      "default_max_tokens": 8000,
1980      "can_reason": true,
1981      "reasoning_levels": [
1982        "low",
1983        "medium",
1984        "high"
1985      ],
1986      "default_reasoning_effort": "medium",
1987      "supports_attachments": true,
1988      "options": {}
1989    },
1990    {
1991      "id": "alibaba/qwen3.5-plus",
1992      "name": "Qwen 3.5 Plus",
1993      "cost_per_1m_in": 0.4,
1994      "cost_per_1m_out": 2.4,
1995      "cost_per_1m_in_cached": 0.04,
1996      "cost_per_1m_out_cached": 0.5,
1997      "context_window": 1000000,
1998      "default_max_tokens": 8000,
1999      "can_reason": true,
2000      "reasoning_levels": [
2001        "low",
2002        "medium",
2003        "high"
2004      ],
2005      "default_reasoning_effort": "medium",
2006      "supports_attachments": true,
2007      "options": {}
2008    },
2009    {
2010      "id": "alibaba/qwen3-235b-a22b-thinking",
2011      "name": "Qwen3 235B A22B Thinking 2507",
2012      "cost_per_1m_in": 0.3,
2013      "cost_per_1m_out": 2.9,
2014      "cost_per_1m_in_cached": 0,
2015      "cost_per_1m_out_cached": 0,
2016      "context_window": 262114,
2017      "default_max_tokens": 8000,
2018      "can_reason": true,
2019      "reasoning_levels": [
2020        "low",
2021        "medium",
2022        "high"
2023      ],
2024      "default_reasoning_effort": "medium",
2025      "supports_attachments": true,
2026      "options": {}
2027    },
2028    {
2029      "id": "alibaba/qwen3-coder",
2030      "name": "Qwen3 Coder 480B A35B Instruct",
2031      "cost_per_1m_in": 0.4,
2032      "cost_per_1m_out": 1.6,
2033      "cost_per_1m_in_cached": 0,
2034      "cost_per_1m_out_cached": 0,
2035      "context_window": 262144,
2036      "default_max_tokens": 8000,
2037      "can_reason": false,
2038      "supports_attachments": false,
2039      "options": {}
2040    },
2041    {
2042      "id": "alibaba/qwen3-coder-next",
2043      "name": "Qwen3 Coder Next",
2044      "cost_per_1m_in": 0.5,
2045      "cost_per_1m_out": 1.2,
2046      "cost_per_1m_in_cached": 0,
2047      "cost_per_1m_out_cached": 0,
2048      "context_window": 256000,
2049      "default_max_tokens": 8000,
2050      "can_reason": false,
2051      "supports_attachments": false,
2052      "options": {}
2053    },
2054    {
2055      "id": "alibaba/qwen3-coder-plus",
2056      "name": "Qwen3 Coder Plus",
2057      "cost_per_1m_in": 1,
2058      "cost_per_1m_out": 5,
2059      "cost_per_1m_in_cached": 0.2,
2060      "cost_per_1m_out_cached": 0,
2061      "context_window": 1000000,
2062      "default_max_tokens": 8000,
2063      "can_reason": false,
2064      "supports_attachments": false,
2065      "options": {}
2066    },
2067    {
2068      "id": "alibaba/qwen3-max",
2069      "name": "Qwen3 Max",
2070      "cost_per_1m_in": 1.2,
2071      "cost_per_1m_out": 6,
2072      "cost_per_1m_in_cached": 0.24,
2073      "cost_per_1m_out_cached": 0,
2074      "context_window": 262144,
2075      "default_max_tokens": 8000,
2076      "can_reason": false,
2077      "supports_attachments": false,
2078      "options": {}
2079    },
2080    {
2081      "id": "alibaba/qwen3-max-preview",
2082      "name": "Qwen3 Max Preview",
2083      "cost_per_1m_in": 1.2,
2084      "cost_per_1m_out": 6,
2085      "cost_per_1m_in_cached": 0.24,
2086      "cost_per_1m_out_cached": 0,
2087      "context_window": 262144,
2088      "default_max_tokens": 8000,
2089      "can_reason": false,
2090      "supports_attachments": false,
2091      "options": {}
2092    },
2093    {
2094      "id": "alibaba/qwen3-vl-thinking",
2095      "name": "Qwen3 VL 235B A22B Thinking",
2096      "cost_per_1m_in": 0.22,
2097      "cost_per_1m_out": 0.88,
2098      "cost_per_1m_in_cached": 0,
2099      "cost_per_1m_out_cached": 0,
2100      "context_window": 256000,
2101      "default_max_tokens": 8000,
2102      "can_reason": true,
2103      "reasoning_levels": [
2104        "low",
2105        "medium",
2106        "high"
2107      ],
2108      "default_reasoning_effort": "medium",
2109      "supports_attachments": true,
2110      "options": {}
2111    },
2112    {
2113      "id": "alibaba/qwen-3-14b",
2114      "name": "Qwen3-14B",
2115      "cost_per_1m_in": 0.06,
2116      "cost_per_1m_out": 0.24,
2117      "cost_per_1m_in_cached": 0,
2118      "cost_per_1m_out_cached": 0,
2119      "context_window": 40960,
2120      "default_max_tokens": 8000,
2121      "can_reason": true,
2122      "reasoning_levels": [
2123        "low",
2124        "medium",
2125        "high"
2126      ],
2127      "default_reasoning_effort": "medium",
2128      "supports_attachments": false,
2129      "options": {}
2130    },
2131    {
2132      "id": "alibaba/qwen-3-235b",
2133      "name": "Qwen3-235B-A22B",
2134      "cost_per_1m_in": 0.071,
2135      "cost_per_1m_out": 0.463,
2136      "cost_per_1m_in_cached": 0,
2137      "cost_per_1m_out_cached": 0,
2138      "context_window": 40960,
2139      "default_max_tokens": 8000,
2140      "can_reason": false,
2141      "supports_attachments": false,
2142      "options": {}
2143    },
2144    {
2145      "id": "alibaba/qwen-3-30b",
2146      "name": "Qwen3-30B-A3B",
2147      "cost_per_1m_in": 0.08,
2148      "cost_per_1m_out": 0.29,
2149      "cost_per_1m_in_cached": 0,
2150      "cost_per_1m_out_cached": 0,
2151      "context_window": 40960,
2152      "default_max_tokens": 8000,
2153      "can_reason": true,
2154      "reasoning_levels": [
2155        "low",
2156        "medium",
2157        "high"
2158      ],
2159      "default_reasoning_effort": "medium",
2160      "supports_attachments": false,
2161      "options": {}
2162    },
2163    {
2164      "id": "bytedance/seed-1.6",
2165      "name": "Seed 1.6",
2166      "cost_per_1m_in": 0.25,
2167      "cost_per_1m_out": 2,
2168      "cost_per_1m_in_cached": 0.05,
2169      "cost_per_1m_out_cached": 0,
2170      "context_window": 256000,
2171      "default_max_tokens": 8000,
2172      "can_reason": true,
2173      "reasoning_levels": [
2174        "low",
2175        "medium",
2176        "high"
2177      ],
2178      "default_reasoning_effort": "medium",
2179      "supports_attachments": false,
2180      "options": {}
2181    },
2182    {
2183      "id": "perplexity/sonar",
2184      "name": "Sonar",
2185      "cost_per_1m_in": 1,
2186      "cost_per_1m_out": 1,
2187      "cost_per_1m_in_cached": 0,
2188      "cost_per_1m_out_cached": 0,
2189      "context_window": 127000,
2190      "default_max_tokens": 8000,
2191      "can_reason": false,
2192      "supports_attachments": true,
2193      "options": {}
2194    },
2195    {
2196      "id": "perplexity/sonar-pro",
2197      "name": "Sonar Pro",
2198      "cost_per_1m_in": 3,
2199      "cost_per_1m_out": 15,
2200      "cost_per_1m_in_cached": 0,
2201      "cost_per_1m_out_cached": 0,
2202      "context_window": 200000,
2203      "default_max_tokens": 8000,
2204      "can_reason": false,
2205      "supports_attachments": true,
2206      "options": {}
2207    },
2208    {
2209      "id": "arcee-ai/trinity-large-preview",
2210      "name": "Trinity Large Preview",
2211      "cost_per_1m_in": 0.25,
2212      "cost_per_1m_out": 1,
2213      "cost_per_1m_in_cached": 0,
2214      "cost_per_1m_out_cached": 0,
2215      "context_window": 131000,
2216      "default_max_tokens": 8000,
2217      "can_reason": false,
2218      "supports_attachments": false,
2219      "options": {}
2220    },
2221    {
2222      "id": "openai/gpt-oss-20b",
2223      "name": "gpt-oss-20b",
2224      "cost_per_1m_in": 0.07,
2225      "cost_per_1m_out": 0.3,
2226      "cost_per_1m_in_cached": 0,
2227      "cost_per_1m_out_cached": 0,
2228      "context_window": 128000,
2229      "default_max_tokens": 8000,
2230      "can_reason": true,
2231      "reasoning_levels": [
2232        "low",
2233        "medium",
2234        "high"
2235      ],
2236      "default_reasoning_effort": "medium",
2237      "supports_attachments": false,
2238      "options": {}
2239    },
2240    {
2241      "id": "openai/gpt-oss-safeguard-20b",
2242      "name": "gpt-oss-safeguard-20b",
2243      "cost_per_1m_in": 0.075,
2244      "cost_per_1m_out": 0.3,
2245      "cost_per_1m_in_cached": 0.037,
2246      "cost_per_1m_out_cached": 0,
2247      "context_window": 131072,
2248      "default_max_tokens": 8000,
2249      "can_reason": true,
2250      "reasoning_levels": [
2251        "low",
2252        "medium",
2253        "high"
2254      ],
2255      "default_reasoning_effort": "medium",
2256      "supports_attachments": false,
2257      "options": {}
2258    },
2259    {
2260      "id": "openai/o1",
2261      "name": "o1",
2262      "cost_per_1m_in": 15,
2263      "cost_per_1m_out": 60,
2264      "cost_per_1m_in_cached": 7.5,
2265      "cost_per_1m_out_cached": 0,
2266      "context_window": 200000,
2267      "default_max_tokens": 8000,
2268      "can_reason": true,
2269      "reasoning_levels": [
2270        "low",
2271        "medium",
2272        "high"
2273      ],
2274      "default_reasoning_effort": "medium",
2275      "supports_attachments": true,
2276      "options": {}
2277    },
2278    {
2279      "id": "openai/o3",
2280      "name": "o3",
2281      "cost_per_1m_in": 2,
2282      "cost_per_1m_out": 8,
2283      "cost_per_1m_in_cached": 0.5,
2284      "cost_per_1m_out_cached": 0,
2285      "context_window": 200000,
2286      "default_max_tokens": 8000,
2287      "can_reason": true,
2288      "reasoning_levels": [
2289        "low",
2290        "medium",
2291        "high"
2292      ],
2293      "default_reasoning_effort": "medium",
2294      "supports_attachments": true,
2295      "options": {}
2296    },
2297    {
2298      "id": "openai/o3-pro",
2299      "name": "o3 Pro",
2300      "cost_per_1m_in": 20,
2301      "cost_per_1m_out": 80,
2302      "cost_per_1m_in_cached": 0,
2303      "cost_per_1m_out_cached": 0,
2304      "context_window": 200000,
2305      "default_max_tokens": 8000,
2306      "can_reason": true,
2307      "reasoning_levels": [
2308        "low",
2309        "medium",
2310        "high"
2311      ],
2312      "default_reasoning_effort": "medium",
2313      "supports_attachments": true,
2314      "options": {}
2315    },
2316    {
2317      "id": "openai/o3-deep-research",
2318      "name": "o3-deep-research",
2319      "cost_per_1m_in": 10,
2320      "cost_per_1m_out": 40,
2321      "cost_per_1m_in_cached": 2.5,
2322      "cost_per_1m_out_cached": 0,
2323      "context_window": 200000,
2324      "default_max_tokens": 8000,
2325      "can_reason": true,
2326      "reasoning_levels": [
2327        "low",
2328        "medium",
2329        "high"
2330      ],
2331      "default_reasoning_effort": "medium",
2332      "supports_attachments": true,
2333      "options": {}
2334    },
2335    {
2336      "id": "openai/o3-mini",
2337      "name": "o3-mini",
2338      "cost_per_1m_in": 1.1,
2339      "cost_per_1m_out": 4.4,
2340      "cost_per_1m_in_cached": 0.55,
2341      "cost_per_1m_out_cached": 0,
2342      "context_window": 200000,
2343      "default_max_tokens": 8000,
2344      "can_reason": true,
2345      "reasoning_levels": [
2346        "low",
2347        "medium",
2348        "high"
2349      ],
2350      "default_reasoning_effort": "medium",
2351      "supports_attachments": false,
2352      "options": {}
2353    },
2354    {
2355      "id": "openai/o4-mini",
2356      "name": "o4-mini",
2357      "cost_per_1m_in": 1.1,
2358      "cost_per_1m_out": 4.4,
2359      "cost_per_1m_in_cached": 0.275,
2360      "cost_per_1m_out_cached": 0,
2361      "context_window": 200000,
2362      "default_max_tokens": 8000,
2363      "can_reason": true,
2364      "reasoning_levels": [
2365        "low",
2366        "medium",
2367        "high"
2368      ],
2369      "default_reasoning_effort": "medium",
2370      "supports_attachments": true,
2371      "options": {}
2372    },
2373    {
2374      "id": "vercel/v0-1.0-md",
2375      "name": "v0-1.0-md",
2376      "cost_per_1m_in": 3,
2377      "cost_per_1m_out": 15,
2378      "cost_per_1m_in_cached": 0,
2379      "cost_per_1m_out_cached": 0,
2380      "context_window": 128000,
2381      "default_max_tokens": 8000,
2382      "can_reason": false,
2383      "supports_attachments": true,
2384      "options": {}
2385    },
2386    {
2387      "id": "vercel/v0-1.5-md",
2388      "name": "v0-1.5-md",
2389      "cost_per_1m_in": 3,
2390      "cost_per_1m_out": 15,
2391      "cost_per_1m_in_cached": 0,
2392      "cost_per_1m_out_cached": 0,
2393      "context_window": 128000,
2394      "default_max_tokens": 8000,
2395      "can_reason": false,
2396      "supports_attachments": true,
2397      "options": {}
2398    }
2399  ],
2400  "default_headers": {
2401    "HTTP-Referer": "https://charm.land",
2402    "X-Title": "Crush"
2403  }
2404}