vercel.json

   1{
   2  "name": "Vercel",
   3  "id": "vercel",
   4  "api_key": "$VERCEL_API_KEY",
   5  "api_endpoint": "https://ai-gateway.vercel.sh/v1",
   6  "type": "vercel",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-haiku-4.5",
   9  "models": [
  10    {
  11      "id": "anthropic/claude-3-haiku",
  12      "name": "Claude 3 Haiku",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 1.25,
  15      "cost_per_1m_in_cached": 0.03,
  16      "cost_per_1m_out_cached": 0.3,
  17      "context_window": 200000,
  18      "default_max_tokens": 4096,
  19      "can_reason": false,
  20      "supports_attachments": true
  21    },
  22    {
  23      "id": "anthropic/claude-3.5-haiku",
  24      "name": "Claude 3.5 Haiku",
  25      "cost_per_1m_in": 0.8,
  26      "cost_per_1m_out": 4,
  27      "cost_per_1m_in_cached": 0.08,
  28      "cost_per_1m_out_cached": 1,
  29      "context_window": 200000,
  30      "default_max_tokens": 8000,
  31      "can_reason": false,
  32      "supports_attachments": true
  33    },
  34    {
  35      "id": "anthropic/claude-3.5-sonnet",
  36      "name": "Claude 3.5 Sonnet",
  37      "cost_per_1m_in": 3,
  38      "cost_per_1m_out": 15,
  39      "cost_per_1m_in_cached": 0.3,
  40      "cost_per_1m_out_cached": 3.75,
  41      "context_window": 200000,
  42      "default_max_tokens": 8000,
  43      "can_reason": false,
  44      "supports_attachments": true
  45    },
  46    {
  47      "id": "anthropic/claude-3.5-sonnet-20240620",
  48      "name": "Claude 3.5 Sonnet (2024-06-20)",
  49      "cost_per_1m_in": 3,
  50      "cost_per_1m_out": 15,
  51      "cost_per_1m_in_cached": 0.3,
  52      "cost_per_1m_out_cached": 3.75,
  53      "context_window": 200000,
  54      "default_max_tokens": 8000,
  55      "can_reason": false,
  56      "supports_attachments": true
  57    },
  58    {
  59      "id": "anthropic/claude-3.7-sonnet",
  60      "name": "Claude 3.7 Sonnet",
  61      "cost_per_1m_in": 3,
  62      "cost_per_1m_out": 15,
  63      "cost_per_1m_in_cached": 0.3,
  64      "cost_per_1m_out_cached": 3.75,
  65      "context_window": 200000,
  66      "default_max_tokens": 8000,
  67      "can_reason": true,
  68      "reasoning_levels": [
  69        "none",
  70        "minimal",
  71        "low",
  72        "medium",
  73        "high",
  74        "xhigh"
  75      ],
  76      "default_reasoning_effort": "medium",
  77      "supports_attachments": true
  78    },
  79    {
  80      "id": "anthropic/claude-haiku-4.5",
  81      "name": "Claude Haiku 4.5",
  82      "cost_per_1m_in": 1,
  83      "cost_per_1m_out": 5,
  84      "cost_per_1m_in_cached": 0.1,
  85      "cost_per_1m_out_cached": 1.25,
  86      "context_window": 200000,
  87      "default_max_tokens": 8000,
  88      "can_reason": true,
  89      "reasoning_levels": [
  90        "none",
  91        "minimal",
  92        "low",
  93        "medium",
  94        "high",
  95        "xhigh"
  96      ],
  97      "default_reasoning_effort": "medium",
  98      "supports_attachments": true
  99    },
 100    {
 101      "id": "anthropic/claude-opus-4",
 102      "name": "Claude Opus 4",
 103      "cost_per_1m_in": 15,
 104      "cost_per_1m_out": 75,
 105      "cost_per_1m_in_cached": 1.5,
 106      "cost_per_1m_out_cached": 18.75,
 107      "context_window": 200000,
 108      "default_max_tokens": 8000,
 109      "can_reason": true,
 110      "reasoning_levels": [
 111        "none",
 112        "minimal",
 113        "low",
 114        "medium",
 115        "high",
 116        "xhigh"
 117      ],
 118      "default_reasoning_effort": "medium",
 119      "supports_attachments": true
 120    },
 121    {
 122      "id": "anthropic/claude-opus-4.1",
 123      "name": "Claude Opus 4.1",
 124      "cost_per_1m_in": 15,
 125      "cost_per_1m_out": 75,
 126      "cost_per_1m_in_cached": 1.5,
 127      "cost_per_1m_out_cached": 18.75,
 128      "context_window": 200000,
 129      "default_max_tokens": 8000,
 130      "can_reason": true,
 131      "reasoning_levels": [
 132        "none",
 133        "minimal",
 134        "low",
 135        "medium",
 136        "high",
 137        "xhigh"
 138      ],
 139      "default_reasoning_effort": "medium",
 140      "supports_attachments": true
 141    },
 142    {
 143      "id": "anthropic/claude-opus-4.5",
 144      "name": "Claude Opus 4.5",
 145      "cost_per_1m_in": 5,
 146      "cost_per_1m_out": 25,
 147      "cost_per_1m_in_cached": 0.5,
 148      "cost_per_1m_out_cached": 6.25,
 149      "context_window": 200000,
 150      "default_max_tokens": 8000,
 151      "can_reason": true,
 152      "reasoning_levels": [
 153        "none",
 154        "minimal",
 155        "low",
 156        "medium",
 157        "high",
 158        "xhigh"
 159      ],
 160      "default_reasoning_effort": "medium",
 161      "supports_attachments": true
 162    },
 163    {
 164      "id": "anthropic/claude-opus-4.6",
 165      "name": "Claude Opus 4.6",
 166      "cost_per_1m_in": 5,
 167      "cost_per_1m_out": 25,
 168      "cost_per_1m_in_cached": 0.5,
 169      "cost_per_1m_out_cached": 6.25,
 170      "context_window": 1000000,
 171      "default_max_tokens": 8000,
 172      "can_reason": true,
 173      "reasoning_levels": [
 174        "none",
 175        "minimal",
 176        "low",
 177        "medium",
 178        "high",
 179        "xhigh"
 180      ],
 181      "default_reasoning_effort": "medium",
 182      "supports_attachments": true
 183    },
 184    {
 185      "id": "anthropic/claude-sonnet-4",
 186      "name": "Claude Sonnet 4",
 187      "cost_per_1m_in": 3,
 188      "cost_per_1m_out": 15,
 189      "cost_per_1m_in_cached": 0.3,
 190      "cost_per_1m_out_cached": 3.75,
 191      "context_window": 1000000,
 192      "default_max_tokens": 8000,
 193      "can_reason": true,
 194      "reasoning_levels": [
 195        "none",
 196        "minimal",
 197        "low",
 198        "medium",
 199        "high",
 200        "xhigh"
 201      ],
 202      "default_reasoning_effort": "medium",
 203      "supports_attachments": true
 204    },
 205    {
 206      "id": "anthropic/claude-sonnet-4.5",
 207      "name": "Claude Sonnet 4.5",
 208      "cost_per_1m_in": 3,
 209      "cost_per_1m_out": 15,
 210      "cost_per_1m_in_cached": 0.3,
 211      "cost_per_1m_out_cached": 3.75,
 212      "context_window": 1000000,
 213      "default_max_tokens": 8000,
 214      "can_reason": true,
 215      "reasoning_levels": [
 216        "none",
 217        "minimal",
 218        "low",
 219        "medium",
 220        "high",
 221        "xhigh"
 222      ],
 223      "default_reasoning_effort": "medium",
 224      "supports_attachments": true
 225    },
 226    {
 227      "id": "anthropic/claude-sonnet-4.6",
 228      "name": "Claude Sonnet 4.6",
 229      "cost_per_1m_in": 3,
 230      "cost_per_1m_out": 15,
 231      "cost_per_1m_in_cached": 0.3,
 232      "cost_per_1m_out_cached": 3.75,
 233      "context_window": 1000000,
 234      "default_max_tokens": 8000,
 235      "can_reason": true,
 236      "reasoning_levels": [
 237        "none",
 238        "minimal",
 239        "low",
 240        "medium",
 241        "high",
 242        "xhigh"
 243      ],
 244      "default_reasoning_effort": "medium",
 245      "supports_attachments": true
 246    },
 247    {
 248      "id": "cohere/command-a",
 249      "name": "Command A",
 250      "cost_per_1m_in": 2.5,
 251      "cost_per_1m_out": 10,
 252      "cost_per_1m_in_cached": 0,
 253      "cost_per_1m_out_cached": 0,
 254      "context_window": 256000,
 255      "default_max_tokens": 8000,
 256      "can_reason": false,
 257      "supports_attachments": false
 258    },
 259    {
 260      "id": "deepseek/deepseek-v3",
 261      "name": "DeepSeek V3 0324",
 262      "cost_per_1m_in": 0.77,
 263      "cost_per_1m_out": 0.77,
 264      "cost_per_1m_in_cached": 0,
 265      "cost_per_1m_out_cached": 0,
 266      "context_window": 163840,
 267      "default_max_tokens": 8000,
 268      "can_reason": false,
 269      "supports_attachments": false
 270    },
 271    {
 272      "id": "deepseek/deepseek-v3.1-terminus",
 273      "name": "DeepSeek V3.1 Terminus",
 274      "cost_per_1m_in": 0.27,
 275      "cost_per_1m_out": 1,
 276      "cost_per_1m_in_cached": 0.135,
 277      "cost_per_1m_out_cached": 0,
 278      "context_window": 131072,
 279      "default_max_tokens": 8000,
 280      "can_reason": true,
 281      "reasoning_levels": [
 282        "low",
 283        "medium",
 284        "high"
 285      ],
 286      "default_reasoning_effort": "medium",
 287      "supports_attachments": false
 288    },
 289    {
 290      "id": "deepseek/deepseek-v3.2",
 291      "name": "DeepSeek V3.2",
 292      "cost_per_1m_in": 0.28,
 293      "cost_per_1m_out": 0.42,
 294      "cost_per_1m_in_cached": 0.028,
 295      "cost_per_1m_out_cached": 0,
 296      "context_window": 128000,
 297      "default_max_tokens": 8000,
 298      "can_reason": false,
 299      "supports_attachments": false
 300    },
 301    {
 302      "id": "deepseek/deepseek-v3.2-thinking",
 303      "name": "DeepSeek V3.2 Thinking",
 304      "cost_per_1m_in": 0.28,
 305      "cost_per_1m_out": 0.42,
 306      "cost_per_1m_in_cached": 0.028,
 307      "cost_per_1m_out_cached": 0,
 308      "context_window": 128000,
 309      "default_max_tokens": 8000,
 310      "can_reason": true,
 311      "reasoning_levels": [
 312        "low",
 313        "medium",
 314        "high"
 315      ],
 316      "default_reasoning_effort": "medium",
 317      "supports_attachments": false
 318    },
 319    {
 320      "id": "deepseek/deepseek-r1",
 321      "name": "DeepSeek-R1",
 322      "cost_per_1m_in": 1.35,
 323      "cost_per_1m_out": 5.4,
 324      "cost_per_1m_in_cached": 0,
 325      "cost_per_1m_out_cached": 0,
 326      "context_window": 128000,
 327      "default_max_tokens": 8000,
 328      "can_reason": true,
 329      "reasoning_levels": [
 330        "low",
 331        "medium",
 332        "high"
 333      ],
 334      "default_reasoning_effort": "medium",
 335      "supports_attachments": false
 336    },
 337    {
 338      "id": "deepseek/deepseek-v3.1",
 339      "name": "DeepSeek-V3.1",
 340      "cost_per_1m_in": 0.56,
 341      "cost_per_1m_out": 1.68,
 342      "cost_per_1m_in_cached": 0.28,
 343      "cost_per_1m_out_cached": 0,
 344      "context_window": 163840,
 345      "default_max_tokens": 8000,
 346      "can_reason": true,
 347      "reasoning_levels": [
 348        "low",
 349        "medium",
 350        "high"
 351      ],
 352      "default_reasoning_effort": "medium",
 353      "supports_attachments": false
 354    },
 355    {
 356      "id": "mistral/devstral-2",
 357      "name": "Devstral 2",
 358      "cost_per_1m_in": 0.4,
 359      "cost_per_1m_out": 2,
 360      "cost_per_1m_in_cached": 0,
 361      "cost_per_1m_out_cached": 0,
 362      "context_window": 256000,
 363      "default_max_tokens": 8000,
 364      "can_reason": false,
 365      "supports_attachments": false
 366    },
 367    {
 368      "id": "mistral/devstral-small",
 369      "name": "Devstral Small 1.1",
 370      "cost_per_1m_in": 0.1,
 371      "cost_per_1m_out": 0.3,
 372      "cost_per_1m_in_cached": 0,
 373      "cost_per_1m_out_cached": 0,
 374      "context_window": 128000,
 375      "default_max_tokens": 8000,
 376      "can_reason": false,
 377      "supports_attachments": false
 378    },
 379    {
 380      "id": "mistral/devstral-small-2",
 381      "name": "Devstral Small 2",
 382      "cost_per_1m_in": 0.1,
 383      "cost_per_1m_out": 0.3,
 384      "cost_per_1m_in_cached": 0,
 385      "cost_per_1m_out_cached": 0,
 386      "context_window": 256000,
 387      "default_max_tokens": 8000,
 388      "can_reason": false,
 389      "supports_attachments": false
 390    },
 391    {
 392      "id": "zai/glm-4.5-air",
 393      "name": "GLM 4.5 Air",
 394      "cost_per_1m_in": 0.2,
 395      "cost_per_1m_out": 1.1,
 396      "cost_per_1m_in_cached": 0.03,
 397      "cost_per_1m_out_cached": 0,
 398      "context_window": 128000,
 399      "default_max_tokens": 8000,
 400      "can_reason": true,
 401      "reasoning_levels": [
 402        "low",
 403        "medium",
 404        "high"
 405      ],
 406      "default_reasoning_effort": "medium",
 407      "supports_attachments": false
 408    },
 409    {
 410      "id": "zai/glm-4.5v",
 411      "name": "GLM 4.5V",
 412      "cost_per_1m_in": 0.6,
 413      "cost_per_1m_out": 1.8,
 414      "cost_per_1m_in_cached": 0.11,
 415      "cost_per_1m_out_cached": 0,
 416      "context_window": 66000,
 417      "default_max_tokens": 8000,
 418      "can_reason": false,
 419      "supports_attachments": true
 420    },
 421    {
 422      "id": "zai/glm-4.6",
 423      "name": "GLM 4.6",
 424      "cost_per_1m_in": 0.6,
 425      "cost_per_1m_out": 2.2,
 426      "cost_per_1m_in_cached": 0.11,
 427      "cost_per_1m_out_cached": 0,
 428      "context_window": 200000,
 429      "default_max_tokens": 8000,
 430      "can_reason": true,
 431      "reasoning_levels": [
 432        "low",
 433        "medium",
 434        "high"
 435      ],
 436      "default_reasoning_effort": "medium",
 437      "supports_attachments": false
 438    },
 439    {
 440      "id": "zai/glm-4.7",
 441      "name": "GLM 4.7",
 442      "cost_per_1m_in": 0.6,
 443      "cost_per_1m_out": 2.2,
 444      "cost_per_1m_in_cached": 0.11,
 445      "cost_per_1m_out_cached": 0,
 446      "context_window": 200000,
 447      "default_max_tokens": 8000,
 448      "can_reason": true,
 449      "reasoning_levels": [
 450        "low",
 451        "medium",
 452        "high"
 453      ],
 454      "default_reasoning_effort": "medium",
 455      "supports_attachments": false
 456    },
 457    {
 458      "id": "zai/glm-4.7-flash",
 459      "name": "GLM 4.7 Flash",
 460      "cost_per_1m_in": 0.07,
 461      "cost_per_1m_out": 0.4,
 462      "cost_per_1m_in_cached": 0,
 463      "cost_per_1m_out_cached": 0,
 464      "context_window": 200000,
 465      "default_max_tokens": 8000,
 466      "can_reason": true,
 467      "reasoning_levels": [
 468        "low",
 469        "medium",
 470        "high"
 471      ],
 472      "default_reasoning_effort": "medium",
 473      "supports_attachments": false
 474    },
 475    {
 476      "id": "zai/glm-4.7-flashx",
 477      "name": "GLM 4.7 FlashX",
 478      "cost_per_1m_in": 0.06,
 479      "cost_per_1m_out": 0.4,
 480      "cost_per_1m_in_cached": 0.01,
 481      "cost_per_1m_out_cached": 0,
 482      "context_window": 200000,
 483      "default_max_tokens": 8000,
 484      "can_reason": true,
 485      "reasoning_levels": [
 486        "low",
 487        "medium",
 488        "high"
 489      ],
 490      "default_reasoning_effort": "medium",
 491      "supports_attachments": false
 492    },
 493    {
 494      "id": "zai/glm-5",
 495      "name": "GLM 5",
 496      "cost_per_1m_in": 1,
 497      "cost_per_1m_out": 3.2,
 498      "cost_per_1m_in_cached": 0.2,
 499      "cost_per_1m_out_cached": 0,
 500      "context_window": 202800,
 501      "default_max_tokens": 8000,
 502      "can_reason": true,
 503      "reasoning_levels": [
 504        "low",
 505        "medium",
 506        "high"
 507      ],
 508      "default_reasoning_effort": "medium",
 509      "supports_attachments": false
 510    },
 511    {
 512      "id": "zai/glm-5-turbo",
 513      "name": "GLM 5 Turbo",
 514      "cost_per_1m_in": 1.2,
 515      "cost_per_1m_out": 4,
 516      "cost_per_1m_in_cached": 0.24,
 517      "cost_per_1m_out_cached": 0,
 518      "context_window": 202800,
 519      "default_max_tokens": 8000,
 520      "can_reason": true,
 521      "reasoning_levels": [
 522        "low",
 523        "medium",
 524        "high"
 525      ],
 526      "default_reasoning_effort": "medium",
 527      "supports_attachments": false
 528    },
 529    {
 530      "id": "zai/glm-4.5",
 531      "name": "GLM-4.5",
 532      "cost_per_1m_in": 0.6,
 533      "cost_per_1m_out": 2.2,
 534      "cost_per_1m_in_cached": 0.11,
 535      "cost_per_1m_out_cached": 0,
 536      "context_window": 128000,
 537      "default_max_tokens": 8000,
 538      "can_reason": true,
 539      "reasoning_levels": [
 540        "low",
 541        "medium",
 542        "high"
 543      ],
 544      "default_reasoning_effort": "medium",
 545      "supports_attachments": false
 546    },
 547    {
 548      "id": "zai/glm-4.6v",
 549      "name": "GLM-4.6V",
 550      "cost_per_1m_in": 0.3,
 551      "cost_per_1m_out": 0.9,
 552      "cost_per_1m_in_cached": 0.05,
 553      "cost_per_1m_out_cached": 0,
 554      "context_window": 128000,
 555      "default_max_tokens": 8000,
 556      "can_reason": true,
 557      "reasoning_levels": [
 558        "low",
 559        "medium",
 560        "high"
 561      ],
 562      "default_reasoning_effort": "medium",
 563      "supports_attachments": true
 564    },
 565    {
 566      "id": "zai/glm-4.6v-flash",
 567      "name": "GLM-4.6V-Flash",
 568      "cost_per_1m_in": 0,
 569      "cost_per_1m_out": 0,
 570      "cost_per_1m_in_cached": 0,
 571      "cost_per_1m_out_cached": 0,
 572      "context_window": 128000,
 573      "default_max_tokens": 8000,
 574      "can_reason": true,
 575      "reasoning_levels": [
 576        "low",
 577        "medium",
 578        "high"
 579      ],
 580      "default_reasoning_effort": "medium",
 581      "supports_attachments": true
 582    },
 583    {
 584      "id": "openai/gpt-5-chat",
 585      "name": "GPT 5 Chat",
 586      "cost_per_1m_in": 1.25,
 587      "cost_per_1m_out": 10,
 588      "cost_per_1m_in_cached": 0.125,
 589      "cost_per_1m_out_cached": 0,
 590      "context_window": 128000,
 591      "default_max_tokens": 8000,
 592      "can_reason": true,
 593      "reasoning_levels": [
 594        "low",
 595        "medium",
 596        "high"
 597      ],
 598      "default_reasoning_effort": "medium",
 599      "supports_attachments": true
 600    },
 601    {
 602      "id": "openai/gpt-5.1-codex-max",
 603      "name": "GPT 5.1 Codex Max",
 604      "cost_per_1m_in": 1.25,
 605      "cost_per_1m_out": 10,
 606      "cost_per_1m_in_cached": 0.125,
 607      "cost_per_1m_out_cached": 0,
 608      "context_window": 400000,
 609      "default_max_tokens": 8000,
 610      "can_reason": true,
 611      "reasoning_levels": [
 612        "low",
 613        "medium",
 614        "high"
 615      ],
 616      "default_reasoning_effort": "medium",
 617      "supports_attachments": true
 618    },
 619    {
 620      "id": "openai/gpt-5.1-codex-mini",
 621      "name": "GPT 5.1 Codex Mini",
 622      "cost_per_1m_in": 0.25,
 623      "cost_per_1m_out": 2,
 624      "cost_per_1m_in_cached": 0.025,
 625      "cost_per_1m_out_cached": 0,
 626      "context_window": 400000,
 627      "default_max_tokens": 8000,
 628      "can_reason": true,
 629      "reasoning_levels": [
 630        "low",
 631        "medium",
 632        "high"
 633      ],
 634      "default_reasoning_effort": "medium",
 635      "supports_attachments": true
 636    },
 637    {
 638      "id": "openai/gpt-5.1-thinking",
 639      "name": "GPT 5.1 Thinking",
 640      "cost_per_1m_in": 1.25,
 641      "cost_per_1m_out": 10,
 642      "cost_per_1m_in_cached": 0.125,
 643      "cost_per_1m_out_cached": 0,
 644      "context_window": 400000,
 645      "default_max_tokens": 8000,
 646      "can_reason": true,
 647      "reasoning_levels": [
 648        "low",
 649        "medium",
 650        "high"
 651      ],
 652      "default_reasoning_effort": "medium",
 653      "supports_attachments": true
 654    },
 655    {
 656      "id": "openai/gpt-5.2",
 657      "name": "GPT 5.2",
 658      "cost_per_1m_in": 1.75,
 659      "cost_per_1m_out": 14,
 660      "cost_per_1m_in_cached": 0.175,
 661      "cost_per_1m_out_cached": 0,
 662      "context_window": 400000,
 663      "default_max_tokens": 8000,
 664      "can_reason": true,
 665      "reasoning_levels": [
 666        "low",
 667        "medium",
 668        "high"
 669      ],
 670      "default_reasoning_effort": "medium",
 671      "supports_attachments": true
 672    },
 673    {
 674      "id": "openai/gpt-5.2-pro",
  675      "name": "GPT 5.2 Pro",
 676      "cost_per_1m_in": 21,
 677      "cost_per_1m_out": 168,
 678      "cost_per_1m_in_cached": 0,
 679      "cost_per_1m_out_cached": 0,
 680      "context_window": 400000,
 681      "default_max_tokens": 8000,
 682      "can_reason": true,
 683      "reasoning_levels": [
 684        "low",
 685        "medium",
 686        "high"
 687      ],
 688      "default_reasoning_effort": "medium",
 689      "supports_attachments": true
 690    },
 691    {
 692      "id": "openai/gpt-5.2-chat",
 693      "name": "GPT 5.2 Chat",
 694      "cost_per_1m_in": 1.75,
 695      "cost_per_1m_out": 14,
 696      "cost_per_1m_in_cached": 0.175,
 697      "cost_per_1m_out_cached": 0,
 698      "context_window": 128000,
 699      "default_max_tokens": 8000,
 700      "can_reason": true,
 701      "reasoning_levels": [
 702        "low",
 703        "medium",
 704        "high"
 705      ],
 706      "default_reasoning_effort": "medium",
 707      "supports_attachments": true
 708    },
 709    {
 710      "id": "openai/gpt-5.2-codex",
 711      "name": "GPT 5.2 Codex",
 712      "cost_per_1m_in": 1.75,
 713      "cost_per_1m_out": 14,
 714      "cost_per_1m_in_cached": 0.175,
 715      "cost_per_1m_out_cached": 0,
 716      "context_window": 400000,
 717      "default_max_tokens": 8000,
 718      "can_reason": true,
 719      "reasoning_levels": [
 720        "low",
 721        "medium",
 722        "high"
 723      ],
 724      "default_reasoning_effort": "medium",
 725      "supports_attachments": true
 726    },
 727    {
 728      "id": "openai/gpt-5.3-codex",
 729      "name": "GPT 5.3 Codex",
 730      "cost_per_1m_in": 1.75,
 731      "cost_per_1m_out": 14,
 732      "cost_per_1m_in_cached": 0.175,
 733      "cost_per_1m_out_cached": 0,
 734      "context_window": 400000,
 735      "default_max_tokens": 8000,
 736      "can_reason": true,
 737      "reasoning_levels": [
 738        "low",
 739        "medium",
 740        "high"
 741      ],
 742      "default_reasoning_effort": "medium",
 743      "supports_attachments": true
 744    },
 745    {
 746      "id": "openai/gpt-5.4",
 747      "name": "GPT 5.4",
 748      "cost_per_1m_in": 2.5,
 749      "cost_per_1m_out": 15,
 750      "cost_per_1m_in_cached": 0.25,
 751      "cost_per_1m_out_cached": 0,
 752      "context_window": 1050000,
 753      "default_max_tokens": 8000,
 754      "can_reason": true,
 755      "reasoning_levels": [
 756        "low",
 757        "medium",
 758        "high"
 759      ],
 760      "default_reasoning_effort": "medium",
 761      "supports_attachments": true
 762    },
 763    {
 764      "id": "openai/gpt-5.4-mini",
 765      "name": "GPT 5.4 Mini",
 766      "cost_per_1m_in": 0.75,
 767      "cost_per_1m_out": 4.5,
 768      "cost_per_1m_in_cached": 0.075,
 769      "cost_per_1m_out_cached": 0,
 770      "context_window": 400000,
 771      "default_max_tokens": 8000,
 772      "can_reason": true,
 773      "reasoning_levels": [
 774        "low",
 775        "medium",
 776        "high"
 777      ],
 778      "default_reasoning_effort": "medium",
 779      "supports_attachments": true
 780    },
 781    {
 782      "id": "openai/gpt-5.4-nano",
 783      "name": "GPT 5.4 Nano",
 784      "cost_per_1m_in": 0.2,
 785      "cost_per_1m_out": 1.25,
 786      "cost_per_1m_in_cached": 0.02,
 787      "cost_per_1m_out_cached": 0,
 788      "context_window": 400000,
 789      "default_max_tokens": 8000,
 790      "can_reason": true,
 791      "reasoning_levels": [
 792        "low",
 793        "medium",
 794        "high"
 795      ],
 796      "default_reasoning_effort": "medium",
 797      "supports_attachments": true
 798    },
 799    {
 800      "id": "openai/gpt-5.4-pro",
 801      "name": "GPT 5.4 Pro",
 802      "cost_per_1m_in": 30,
 803      "cost_per_1m_out": 180,
 804      "cost_per_1m_in_cached": 0,
 805      "cost_per_1m_out_cached": 0,
 806      "context_window": 1050000,
 807      "default_max_tokens": 8000,
 808      "can_reason": true,
 809      "reasoning_levels": [
 810        "low",
 811        "medium",
 812        "high"
 813      ],
 814      "default_reasoning_effort": "medium",
 815      "supports_attachments": true
 816    },
 817    {
 818      "id": "openai/gpt-4-turbo",
 819      "name": "GPT-4 Turbo",
 820      "cost_per_1m_in": 10,
 821      "cost_per_1m_out": 30,
 822      "cost_per_1m_in_cached": 0,
 823      "cost_per_1m_out_cached": 0,
 824      "context_window": 128000,
 825      "default_max_tokens": 4096,
 826      "can_reason": false,
 827      "supports_attachments": true
 828    },
 829    {
 830      "id": "openai/gpt-4.1",
 831      "name": "GPT-4.1",
 832      "cost_per_1m_in": 2,
 833      "cost_per_1m_out": 8,
 834      "cost_per_1m_in_cached": 0.5,
 835      "cost_per_1m_out_cached": 0,
 836      "context_window": 1047576,
 837      "default_max_tokens": 8000,
 838      "can_reason": false,
 839      "supports_attachments": true
 840    },
 841    {
 842      "id": "openai/gpt-4.1-mini",
 843      "name": "GPT-4.1 mini",
 844      "cost_per_1m_in": 0.4,
 845      "cost_per_1m_out": 1.6,
 846      "cost_per_1m_in_cached": 0.1,
 847      "cost_per_1m_out_cached": 0,
 848      "context_window": 1047576,
 849      "default_max_tokens": 8000,
 850      "can_reason": false,
 851      "supports_attachments": true
 852    },
 853    {
 854      "id": "openai/gpt-4.1-nano",
 855      "name": "GPT-4.1 nano",
 856      "cost_per_1m_in": 0.1,
 857      "cost_per_1m_out": 0.4,
 858      "cost_per_1m_in_cached": 0.025,
 859      "cost_per_1m_out_cached": 0,
 860      "context_window": 1047576,
 861      "default_max_tokens": 8000,
 862      "can_reason": false,
 863      "supports_attachments": true
 864    },
 865    {
 866      "id": "openai/gpt-4o",
 867      "name": "GPT-4o",
 868      "cost_per_1m_in": 2.5,
 869      "cost_per_1m_out": 10,
 870      "cost_per_1m_in_cached": 1.25,
 871      "cost_per_1m_out_cached": 0,
 872      "context_window": 128000,
 873      "default_max_tokens": 8000,
 874      "can_reason": false,
 875      "supports_attachments": true
 876    },
 877    {
 878      "id": "openai/gpt-4o-mini",
 879      "name": "GPT-4o mini",
 880      "cost_per_1m_in": 0.15,
 881      "cost_per_1m_out": 0.6,
 882      "cost_per_1m_in_cached": 0.075,
 883      "cost_per_1m_out_cached": 0,
 884      "context_window": 128000,
 885      "default_max_tokens": 8000,
 886      "can_reason": false,
 887      "supports_attachments": true
 888    },
 889    {
 890      "id": "openai/gpt-5",
 891      "name": "GPT-5",
 892      "cost_per_1m_in": 1.25,
 893      "cost_per_1m_out": 10,
 894      "cost_per_1m_in_cached": 0.125,
 895      "cost_per_1m_out_cached": 0,
 896      "context_window": 400000,
 897      "default_max_tokens": 8000,
 898      "can_reason": true,
 899      "reasoning_levels": [
 900        "low",
 901        "medium",
 902        "high"
 903      ],
 904      "default_reasoning_effort": "medium",
 905      "supports_attachments": true
 906    },
 907    {
 908      "id": "openai/gpt-5-mini",
 909      "name": "GPT-5 mini",
 910      "cost_per_1m_in": 0.25,
 911      "cost_per_1m_out": 2,
 912      "cost_per_1m_in_cached": 0.025,
 913      "cost_per_1m_out_cached": 0,
 914      "context_window": 400000,
 915      "default_max_tokens": 8000,
 916      "can_reason": true,
 917      "reasoning_levels": [
 918        "low",
 919        "medium",
 920        "high"
 921      ],
 922      "default_reasoning_effort": "medium",
 923      "supports_attachments": true
 924    },
 925    {
 926      "id": "openai/gpt-5-nano",
 927      "name": "GPT-5 nano",
 928      "cost_per_1m_in": 0.05,
 929      "cost_per_1m_out": 0.4,
 930      "cost_per_1m_in_cached": 0.005,
 931      "cost_per_1m_out_cached": 0,
 932      "context_window": 400000,
 933      "default_max_tokens": 8000,
 934      "can_reason": true,
 935      "reasoning_levels": [
 936        "low",
 937        "medium",
 938        "high"
 939      ],
 940      "default_reasoning_effort": "medium",
 941      "supports_attachments": true
 942    },
 943    {
 944      "id": "openai/gpt-5-pro",
 945      "name": "GPT-5 pro",
 946      "cost_per_1m_in": 15,
 947      "cost_per_1m_out": 120,
 948      "cost_per_1m_in_cached": 0,
 949      "cost_per_1m_out_cached": 0,
 950      "context_window": 400000,
 951      "default_max_tokens": 8000,
 952      "can_reason": true,
 953      "reasoning_levels": [
 954        "low",
 955        "medium",
 956        "high"
 957      ],
 958      "default_reasoning_effort": "medium",
 959      "supports_attachments": true
 960    },
 961    {
 962      "id": "openai/gpt-5-codex",
 963      "name": "GPT-5-Codex",
 964      "cost_per_1m_in": 1.25,
 965      "cost_per_1m_out": 10,
 966      "cost_per_1m_in_cached": 0.125,
 967      "cost_per_1m_out_cached": 0,
 968      "context_window": 400000,
 969      "default_max_tokens": 8000,
 970      "can_reason": true,
 971      "reasoning_levels": [
 972        "low",
 973        "medium",
 974        "high"
 975      ],
 976      "default_reasoning_effort": "medium",
 977      "supports_attachments": false
 978    },
 979    {
 980      "id": "openai/gpt-5.1-instant",
 981      "name": "GPT-5.1 Instant",
 982      "cost_per_1m_in": 1.25,
 983      "cost_per_1m_out": 10,
 984      "cost_per_1m_in_cached": 0.125,
 985      "cost_per_1m_out_cached": 0,
 986      "context_window": 128000,
 987      "default_max_tokens": 8000,
 988      "can_reason": true,
 989      "reasoning_levels": [
 990        "low",
 991        "medium",
 992        "high"
 993      ],
 994      "default_reasoning_effort": "medium",
 995      "supports_attachments": true
 996    },
 997    {
 998      "id": "openai/gpt-5.1-codex",
 999      "name": "GPT-5.1-Codex",
1000      "cost_per_1m_in": 1.25,
1001      "cost_per_1m_out": 10,
1002      "cost_per_1m_in_cached": 0.125,
1003      "cost_per_1m_out_cached": 0,
1004      "context_window": 400000,
1005      "default_max_tokens": 8000,
1006      "can_reason": true,
1007      "reasoning_levels": [
1008        "low",
1009        "medium",
1010        "high"
1011      ],
1012      "default_reasoning_effort": "medium",
1013      "supports_attachments": true
1014    },
1015    {
1016      "id": "openai/gpt-5.3-chat",
1017      "name": "GPT-5.3 Chat",
1018      "cost_per_1m_in": 1.75,
1019      "cost_per_1m_out": 14,
1020      "cost_per_1m_in_cached": 0.175,
1021      "cost_per_1m_out_cached": 0,
1022      "context_window": 128000,
1023      "default_max_tokens": 8000,
1024      "can_reason": true,
1025      "reasoning_levels": [
1026        "low",
1027        "medium",
1028        "high"
1029      ],
1030      "default_reasoning_effort": "medium",
1031      "supports_attachments": true
1032    },
1033    {
1034      "id": "google/gemini-2.0-flash",
1035      "name": "Gemini 2.0 Flash",
1036      "cost_per_1m_in": 0.15,
1037      "cost_per_1m_out": 0.6,
1038      "cost_per_1m_in_cached": 0.025,
1039      "cost_per_1m_out_cached": 0,
1040      "context_window": 1048576,
1041      "default_max_tokens": 8000,
1042      "can_reason": false,
1043      "supports_attachments": true
1044    },
1045    {
1046      "id": "google/gemini-2.0-flash-lite",
1047      "name": "Gemini 2.0 Flash Lite",
1048      "cost_per_1m_in": 0.075,
1049      "cost_per_1m_out": 0.3,
1050      "cost_per_1m_in_cached": 0.02,
1051      "cost_per_1m_out_cached": 0,
1052      "context_window": 1048576,
1053      "default_max_tokens": 8000,
1054      "can_reason": false,
1055      "supports_attachments": true
1056    },
1057    {
1058      "id": "google/gemini-2.5-flash",
1059      "name": "Gemini 2.5 Flash",
1060      "cost_per_1m_in": 0.3,
1061      "cost_per_1m_out": 2.5,
1062      "cost_per_1m_in_cached": 0.03,
1063      "cost_per_1m_out_cached": 0,
1064      "context_window": 1000000,
1065      "default_max_tokens": 8000,
1066      "can_reason": true,
1067      "reasoning_levels": [
1068        "low",
1069        "medium",
1070        "high"
1071      ],
1072      "default_reasoning_effort": "medium",
1073      "supports_attachments": true
1074    },
1075    {
1076      "id": "google/gemini-2.5-flash-lite",
1077      "name": "Gemini 2.5 Flash Lite",
1078      "cost_per_1m_in": 0.1,
1079      "cost_per_1m_out": 0.4,
1080      "cost_per_1m_in_cached": 0.01,
1081      "cost_per_1m_out_cached": 0,
1082      "context_window": 1048576,
1083      "default_max_tokens": 8000,
1084      "can_reason": true,
1085      "reasoning_levels": [
1086        "low",
1087        "medium",
1088        "high"
1089      ],
1090      "default_reasoning_effort": "medium",
1091      "supports_attachments": true
1092    },
1093    {
1094      "id": "google/gemini-2.5-pro",
1095      "name": "Gemini 2.5 Pro",
1096      "cost_per_1m_in": 1.25,
1097      "cost_per_1m_out": 10,
1098      "cost_per_1m_in_cached": 0.125,
1099      "cost_per_1m_out_cached": 0,
1100      "context_window": 1048576,
1101      "default_max_tokens": 8000,
1102      "can_reason": true,
1103      "reasoning_levels": [
1104        "low",
1105        "medium",
1106        "high"
1107      ],
1108      "default_reasoning_effort": "medium",
1109      "supports_attachments": true
1110    },
1111    {
1112      "id": "google/gemini-3-flash",
1113      "name": "Gemini 3 Flash",
1114      "cost_per_1m_in": 0.5,
1115      "cost_per_1m_out": 3,
1116      "cost_per_1m_in_cached": 0.05,
1117      "cost_per_1m_out_cached": 0,
1118      "context_window": 1000000,
1119      "default_max_tokens": 8000,
1120      "can_reason": true,
1121      "reasoning_levels": [
1122        "low",
1123        "medium",
1124        "high"
1125      ],
1126      "default_reasoning_effort": "medium",
1127      "supports_attachments": true
1128    },
1129    {
1130      "id": "google/gemini-3-pro-preview",
1131      "name": "Gemini 3 Pro Preview",
1132      "cost_per_1m_in": 2,
1133      "cost_per_1m_out": 12,
1134      "cost_per_1m_in_cached": 0.2,
1135      "cost_per_1m_out_cached": 0,
1136      "context_window": 1000000,
1137      "default_max_tokens": 8000,
1138      "can_reason": true,
1139      "reasoning_levels": [
1140        "low",
1141        "medium",
1142        "high"
1143      ],
1144      "default_reasoning_effort": "medium",
1145      "supports_attachments": true
1146    },
1147    {
1148      "id": "google/gemini-3.1-flash-lite-preview",
1149      "name": "Gemini 3.1 Flash Lite Preview",
1150      "cost_per_1m_in": 0.25,
1151      "cost_per_1m_out": 1.5,
1152      "cost_per_1m_in_cached": 0,
1153      "cost_per_1m_out_cached": 0,
1154      "context_window": 1000000,
1155      "default_max_tokens": 8000,
1156      "can_reason": true,
1157      "reasoning_levels": [
1158        "low",
1159        "medium",
1160        "high"
1161      ],
1162      "default_reasoning_effort": "medium",
1163      "supports_attachments": true
1164    },
1165    {
1166      "id": "google/gemini-3.1-pro-preview",
1167      "name": "Gemini 3.1 Pro Preview",
1168      "cost_per_1m_in": 2,
1169      "cost_per_1m_out": 12,
1170      "cost_per_1m_in_cached": 0.2,
1171      "cost_per_1m_out_cached": 0,
1172      "context_window": 1000000,
1173      "default_max_tokens": 8000,
1174      "can_reason": true,
1175      "reasoning_levels": [
1176        "low",
1177        "medium",
1178        "high"
1179      ],
1180      "default_reasoning_effort": "medium",
1181      "supports_attachments": true
1182    },
1183    {
1184      "id": "xai/grok-2-vision",
1185      "name": "Grok 2 Vision",
1186      "cost_per_1m_in": 2,
1187      "cost_per_1m_out": 10,
1188      "cost_per_1m_in_cached": 0,
1189      "cost_per_1m_out_cached": 0,
1190      "context_window": 32768,
1191      "default_max_tokens": 8000,
1192      "can_reason": false,
1193      "supports_attachments": true
1194    },
1195    {
1196      "id": "xai/grok-3",
1197      "name": "Grok 3 Beta",
1198      "cost_per_1m_in": 3,
1199      "cost_per_1m_out": 15,
1200      "cost_per_1m_in_cached": 0.75,
1201      "cost_per_1m_out_cached": 0,
1202      "context_window": 131072,
1203      "default_max_tokens": 8000,
1204      "can_reason": false,
1205      "supports_attachments": false
1206    },
1207    {
1208      "id": "xai/grok-3-fast",
1209      "name": "Grok 3 Fast Beta",
1210      "cost_per_1m_in": 5,
1211      "cost_per_1m_out": 25,
1212      "cost_per_1m_in_cached": 1.25,
1213      "cost_per_1m_out_cached": 0,
1214      "context_window": 131072,
1215      "default_max_tokens": 8000,
1216      "can_reason": false,
1217      "supports_attachments": false
1218    },
1219    {
1220      "id": "xai/grok-3-mini",
1221      "name": "Grok 3 Mini Beta",
1222      "cost_per_1m_in": 0.3,
1223      "cost_per_1m_out": 0.5,
1224      "cost_per_1m_in_cached": 0.075,
1225      "cost_per_1m_out_cached": 0,
1226      "context_window": 131072,
1227      "default_max_tokens": 8000,
1228      "can_reason": false,
1229      "supports_attachments": false
1230    },
1231    {
1232      "id": "xai/grok-3-mini-fast",
1233      "name": "Grok 3 Mini Fast Beta",
1234      "cost_per_1m_in": 0.6,
1235      "cost_per_1m_out": 4,
1236      "cost_per_1m_in_cached": 0,
1237      "cost_per_1m_out_cached": 0,
1238      "context_window": 131072,
1239      "default_max_tokens": 8000,
1240      "can_reason": false,
1241      "supports_attachments": false
1242    },
1243    {
1244      "id": "xai/grok-4",
1245      "name": "Grok 4",
1246      "cost_per_1m_in": 3,
1247      "cost_per_1m_out": 15,
1248      "cost_per_1m_in_cached": 0.75,
1249      "cost_per_1m_out_cached": 0,
1250      "context_window": 256000,
1251      "default_max_tokens": 8000,
1252      "can_reason": true,
1253      "reasoning_levels": [
1254        "low",
1255        "medium",
1256        "high"
1257      ],
1258      "default_reasoning_effort": "medium",
1259      "supports_attachments": true
1260    },
1261    {
1262      "id": "xai/grok-4-fast-non-reasoning",
1263      "name": "Grok 4 Fast Non-Reasoning",
1264      "cost_per_1m_in": 0.2,
1265      "cost_per_1m_out": 0.5,
1266      "cost_per_1m_in_cached": 0.05,
1267      "cost_per_1m_out_cached": 0,
1268      "context_window": 2000000,
1269      "default_max_tokens": 8000,
1270      "can_reason": false,
1271      "supports_attachments": false
1272    },
1273    {
1274      "id": "xai/grok-4-fast-reasoning",
1275      "name": "Grok 4 Fast Reasoning",
1276      "cost_per_1m_in": 0.2,
1277      "cost_per_1m_out": 0.5,
1278      "cost_per_1m_in_cached": 0.05,
1279      "cost_per_1m_out_cached": 0,
1280      "context_window": 2000000,
1281      "default_max_tokens": 8000,
1282      "can_reason": true,
1283      "reasoning_levels": [
1284        "low",
1285        "medium",
1286        "high"
1287      ],
1288      "default_reasoning_effort": "medium",
1289      "supports_attachments": false
1290    },
1291    {
1292      "id": "xai/grok-4.1-fast-non-reasoning",
1293      "name": "Grok 4.1 Fast Non-Reasoning",
1294      "cost_per_1m_in": 0.2,
1295      "cost_per_1m_out": 0.5,
1296      "cost_per_1m_in_cached": 0.05,
1297      "cost_per_1m_out_cached": 0,
1298      "context_window": 2000000,
1299      "default_max_tokens": 8000,
1300      "can_reason": false,
1301      "supports_attachments": false
1302    },
1303    {
1304      "id": "xai/grok-4.1-fast-reasoning",
1305      "name": "Grok 4.1 Fast Reasoning",
1306      "cost_per_1m_in": 0.2,
1307      "cost_per_1m_out": 0.5,
1308      "cost_per_1m_in_cached": 0.05,
1309      "cost_per_1m_out_cached": 0,
1310      "context_window": 2000000,
1311      "default_max_tokens": 8000,
1312      "can_reason": true,
1313      "reasoning_levels": [
1314        "low",
1315        "medium",
1316        "high"
1317      ],
1318      "default_reasoning_effort": "medium",
1319      "supports_attachments": false
1320    },
1321    {
1322      "id": "xai/grok-4.20-non-reasoning-beta",
1323      "name": "Grok 4.20 Beta Non-Reasoning",
1324      "cost_per_1m_in": 2,
1325      "cost_per_1m_out": 6,
1326      "cost_per_1m_in_cached": 0.2,
1327      "cost_per_1m_out_cached": 0,
1328      "context_window": 2000000,
1329      "default_max_tokens": 8000,
1330      "can_reason": false,
1331      "supports_attachments": true
1332    },
1333    {
1334      "id": "xai/grok-4.20-reasoning-beta",
1335      "name": "Grok 4.20 Beta Reasoning",
1336      "cost_per_1m_in": 2,
1337      "cost_per_1m_out": 6,
1338      "cost_per_1m_in_cached": 0.2,
1339      "cost_per_1m_out_cached": 0,
1340      "context_window": 2000000,
1341      "default_max_tokens": 8000,
1342      "can_reason": true,
1343      "reasoning_levels": [
1344        "low",
1345        "medium",
1346        "high"
1347      ],
1348      "default_reasoning_effort": "medium",
1349      "supports_attachments": true
1350    },
1351    {
1352      "id": "xai/grok-4.20-multi-agent-beta",
1353      "name": "Grok 4.20 Multi Agent Beta",
1354      "cost_per_1m_in": 2,
1355      "cost_per_1m_out": 6,
1356      "cost_per_1m_in_cached": 0.2,
1357      "cost_per_1m_out_cached": 0,
1358      "context_window": 2000000,
1359      "default_max_tokens": 8000,
1360      "can_reason": true,
1361      "reasoning_levels": [
1362        "low",
1363        "medium",
1364        "high"
1365      ],
1366      "default_reasoning_effort": "medium",
1367      "supports_attachments": false
1368    },
1369    {
1370      "id": "xai/grok-4.20-multi-agent",
1371      "name": "Grok 4.20 Multi-Agent",
1372      "cost_per_1m_in": 2,
1373      "cost_per_1m_out": 6,
1374      "cost_per_1m_in_cached": 0.2,
1375      "cost_per_1m_out_cached": 0,
1376      "context_window": 2000000,
1377      "default_max_tokens": 8000,
1378      "can_reason": true,
1379      "reasoning_levels": [
1380        "low",
1381        "medium",
1382        "high"
1383      ],
1384      "default_reasoning_effort": "medium",
1385      "supports_attachments": false
1386    },
1387    {
1388      "id": "xai/grok-4.20-non-reasoning",
1389      "name": "Grok 4.20 Non-Reasoning",
1390      "cost_per_1m_in": 2,
1391      "cost_per_1m_out": 6,
1392      "cost_per_1m_in_cached": 0.2,
1393      "cost_per_1m_out_cached": 0,
1394      "context_window": 2000000,
1395      "default_max_tokens": 8000,
1396      "can_reason": false,
1397      "supports_attachments": true
1398    },
1399    {
1400      "id": "xai/grok-4.20-reasoning",
1401      "name": "Grok 4.20 Reasoning",
1402      "cost_per_1m_in": 2,
1403      "cost_per_1m_out": 6,
1404      "cost_per_1m_in_cached": 0.2,
1405      "cost_per_1m_out_cached": 0,
1406      "context_window": 2000000,
1407      "default_max_tokens": 8000,
1408      "can_reason": true,
1409      "reasoning_levels": [
1410        "low",
1411        "medium",
1412        "high"
1413      ],
1414      "default_reasoning_effort": "medium",
1415      "supports_attachments": true
1416    },
1417    {
1418      "id": "xai/grok-code-fast-1",
1419      "name": "Grok Code Fast 1",
1420      "cost_per_1m_in": 0.2,
1421      "cost_per_1m_out": 1.5,
1422      "cost_per_1m_in_cached": 0.02,
1423      "cost_per_1m_out_cached": 0,
1424      "context_window": 256000,
1425      "default_max_tokens": 8000,
1426      "can_reason": true,
1427      "reasoning_levels": [
1428        "low",
1429        "medium",
1430        "high"
1431      ],
1432      "default_reasoning_effort": "medium",
1433      "supports_attachments": false
1434    },
1435    {
1436      "id": "prime-intellect/intellect-3",
1437      "name": "INTELLECT 3",
1438      "cost_per_1m_in": 0.2,
1439      "cost_per_1m_out": 1.1,
1440      "cost_per_1m_in_cached": 0,
1441      "cost_per_1m_out_cached": 0,
1442      "context_window": 131072,
1443      "default_max_tokens": 8000,
1444      "can_reason": true,
1445      "reasoning_levels": [
1446        "low",
1447        "medium",
1448        "high"
1449      ],
1450      "default_reasoning_effort": "medium",
1451      "supports_attachments": false
1452    },
1453    {
1454      "id": "kwaipilot/kat-coder-pro-v2",
1455      "name": "Kat Coder Pro V2",
1456      "cost_per_1m_in": 0.3,
1457      "cost_per_1m_out": 1.2,
1458      "cost_per_1m_in_cached": 0.06,
1459      "cost_per_1m_out_cached": 0,
1460      "context_window": 256000,
1461      "default_max_tokens": 8000,
1462      "can_reason": true,
1463      "reasoning_levels": [
1464        "low",
1465        "medium",
1466        "high"
1467      ],
1468      "default_reasoning_effort": "medium",
1469      "supports_attachments": false
1470    },
1471    {
1472      "id": "moonshotai/kimi-k2",
1473      "name": "Kimi K2",
1474      "cost_per_1m_in": 0.6,
1475      "cost_per_1m_out": 2.5,
1476      "cost_per_1m_in_cached": 0.15,
1477      "cost_per_1m_out_cached": 0,
1478      "context_window": 131072,
1479      "default_max_tokens": 8000,
1480      "can_reason": false,
1481      "supports_attachments": false
1482    },
1483    {
1484      "id": "moonshotai/kimi-k2-0905",
1485      "name": "Kimi K2 0905",
1486      "cost_per_1m_in": 0.6,
1487      "cost_per_1m_out": 2.5,
1488      "cost_per_1m_in_cached": 0.15,
1489      "cost_per_1m_out_cached": 0,
1490      "context_window": 256000,
1491      "default_max_tokens": 8000,
1492      "can_reason": false,
1493      "supports_attachments": false
1494    },
1495    {
1496      "id": "moonshotai/kimi-k2-thinking",
1497      "name": "Kimi K2 Thinking",
1498      "cost_per_1m_in": 0.6,
1499      "cost_per_1m_out": 2.5,
1500      "cost_per_1m_in_cached": 0.15,
1501      "cost_per_1m_out_cached": 0,
1502      "context_window": 262114,
1503      "default_max_tokens": 8000,
1504      "can_reason": true,
1505      "reasoning_levels": [
1506        "low",
1507        "medium",
1508        "high"
1509      ],
1510      "default_reasoning_effort": "medium",
1511      "supports_attachments": false
1512    },
1513    {
1514      "id": "moonshotai/kimi-k2-thinking-turbo",
1515      "name": "Kimi K2 Thinking Turbo",
1516      "cost_per_1m_in": 1.15,
1517      "cost_per_1m_out": 8,
1518      "cost_per_1m_in_cached": 0.15,
1519      "cost_per_1m_out_cached": 0,
1520      "context_window": 262114,
1521      "default_max_tokens": 8000,
1522      "can_reason": true,
1523      "reasoning_levels": [
1524        "low",
1525        "medium",
1526        "high"
1527      ],
1528      "default_reasoning_effort": "medium",
1529      "supports_attachments": false
1530    },
1531    {
1532      "id": "moonshotai/kimi-k2-turbo",
1533      "name": "Kimi K2 Turbo",
1534      "cost_per_1m_in": 1.15,
1535      "cost_per_1m_out": 8,
1536      "cost_per_1m_in_cached": 0.15,
1537      "cost_per_1m_out_cached": 0,
1538      "context_window": 256000,
1539      "default_max_tokens": 8000,
1540      "can_reason": false,
1541      "supports_attachments": false
1542    },
1543    {
1544      "id": "moonshotai/kimi-k2.5",
1545      "name": "Kimi K2.5",
1546      "cost_per_1m_in": 0.6,
1547      "cost_per_1m_out": 3,
1548      "cost_per_1m_in_cached": 0.1,
1549      "cost_per_1m_out_cached": 0,
1550      "context_window": 262114,
1551      "default_max_tokens": 8000,
1552      "can_reason": true,
1553      "reasoning_levels": [
1554        "low",
1555        "medium",
1556        "high"
1557      ],
1558      "default_reasoning_effort": "medium",
1559      "supports_attachments": true
1560    },
1561    {
1562      "id": "meta/llama-3.1-70b",
1563      "name": "Llama 3.1 70B Instruct",
1564      "cost_per_1m_in": 0.72,
1565      "cost_per_1m_out": 0.72,
1566      "cost_per_1m_in_cached": 0,
1567      "cost_per_1m_out_cached": 0,
1568      "context_window": 128000,
1569      "default_max_tokens": 8000,
1570      "can_reason": false,
1571      "supports_attachments": false
1572    },
1573    {
1574      "id": "meta/llama-3.1-8b",
1575      "name": "Llama 3.1 8B Instruct",
1576      "cost_per_1m_in": 0.22,
1577      "cost_per_1m_out": 0.22,
1578      "cost_per_1m_in_cached": 0,
1579      "cost_per_1m_out_cached": 0,
1580      "context_window": 128000,
1581      "default_max_tokens": 8000,
1582      "can_reason": false,
1583      "supports_attachments": false
1584    },
1585    {
1586      "id": "meta/llama-3.2-11b",
1587      "name": "Llama 3.2 11B Vision Instruct",
1588      "cost_per_1m_in": 0.16,
1589      "cost_per_1m_out": 0.16,
1590      "cost_per_1m_in_cached": 0,
1591      "cost_per_1m_out_cached": 0,
1592      "context_window": 128000,
1593      "default_max_tokens": 8000,
1594      "can_reason": false,
1595      "supports_attachments": true
1596    },
1597    {
1598      "id": "meta/llama-3.2-90b",
1599      "name": "Llama 3.2 90B Vision Instruct",
1600      "cost_per_1m_in": 0.72,
1601      "cost_per_1m_out": 0.72,
1602      "cost_per_1m_in_cached": 0,
1603      "cost_per_1m_out_cached": 0,
1604      "context_window": 128000,
1605      "default_max_tokens": 8000,
1606      "can_reason": false,
1607      "supports_attachments": true
1608    },
1609    {
1610      "id": "meta/llama-3.3-70b",
1611      "name": "Llama 3.3 70B Instruct",
1612      "cost_per_1m_in": 0.72,
1613      "cost_per_1m_out": 0.72,
1614      "cost_per_1m_in_cached": 0,
1615      "cost_per_1m_out_cached": 0,
1616      "context_window": 128000,
1617      "default_max_tokens": 8000,
1618      "can_reason": false,
1619      "supports_attachments": false
1620    },
1621    {
1622      "id": "meta/llama-4-maverick",
1623      "name": "Llama 4 Maverick 17B Instruct",
1624      "cost_per_1m_in": 0.35,
1625      "cost_per_1m_out": 1.15,
1626      "cost_per_1m_in_cached": 0,
1627      "cost_per_1m_out_cached": 0,
1628      "context_window": 524288,
1629      "default_max_tokens": 8000,
1630      "can_reason": false,
1631      "supports_attachments": true
1632    },
1633    {
1634      "id": "meta/llama-4-scout",
1635      "name": "Llama 4 Scout 17B Instruct",
1636      "cost_per_1m_in": 0.17,
1637      "cost_per_1m_out": 0.66,
1638      "cost_per_1m_in_cached": 0,
1639      "cost_per_1m_out_cached": 0,
1640      "context_window": 128000,
1641      "default_max_tokens": 8000,
1642      "can_reason": false,
1643      "supports_attachments": true
1644    },
1645    {
1646      "id": "meituan/longcat-flash-chat",
1647      "name": "LongCat Flash Chat",
1648      "cost_per_1m_in": 0,
1649      "cost_per_1m_out": 0,
1650      "cost_per_1m_in_cached": 0,
1651      "cost_per_1m_out_cached": 0,
1652      "context_window": 128000,
1653      "default_max_tokens": 8000,
1654      "can_reason": false,
1655      "supports_attachments": false
1656    },
1657    {
1658      "id": "meituan/longcat-flash-thinking",
1659      "name": "LongCat Flash Thinking",
1660      "cost_per_1m_in": 0.15,
1661      "cost_per_1m_out": 1.5,
1662      "cost_per_1m_in_cached": 0,
1663      "cost_per_1m_out_cached": 0,
1664      "context_window": 128000,
1665      "default_max_tokens": 8000,
1666      "can_reason": true,
1667      "reasoning_levels": [
1668        "low",
1669        "medium",
1670        "high"
1671      ],
1672      "default_reasoning_effort": "medium",
1673      "supports_attachments": false
1674    },
1675    {
1676      "id": "inception/mercury-2",
1677      "name": "Mercury 2",
1678      "cost_per_1m_in": 0.25,
1679      "cost_per_1m_out": 0.75,
1680      "cost_per_1m_in_cached": 0.025,
1681      "cost_per_1m_out_cached": 0,
1682      "context_window": 128000,
1683      "default_max_tokens": 8000,
1684      "can_reason": true,
1685      "reasoning_levels": [
1686        "low",
1687        "medium",
1688        "high"
1689      ],
1690      "default_reasoning_effort": "medium",
1691      "supports_attachments": false
1692    },
1693    {
1694      "id": "inception/mercury-coder-small",
1695      "name": "Mercury Coder Small Beta",
1696      "cost_per_1m_in": 0.25,
1697      "cost_per_1m_out": 1,
1698      "cost_per_1m_in_cached": 0,
1699      "cost_per_1m_out_cached": 0,
1700      "context_window": 32000,
1701      "default_max_tokens": 8000,
1702      "can_reason": false,
1703      "supports_attachments": false
1704    },
1705    {
1706      "id": "xiaomi/mimo-v2-flash",
1707      "name": "MiMo V2 Flash",
1708      "cost_per_1m_in": 0.09,
1709      "cost_per_1m_out": 0.29,
1710      "cost_per_1m_in_cached": 0.045,
1711      "cost_per_1m_out_cached": 0,
1712      "context_window": 262144,
1713      "default_max_tokens": 8000,
1714      "can_reason": true,
1715      "reasoning_levels": [
1716        "low",
1717        "medium",
1718        "high"
1719      ],
1720      "default_reasoning_effort": "medium",
1721      "supports_attachments": false
1722    },
1723    {
1724      "id": "xiaomi/mimo-v2-pro",
1725      "name": "MiMo V2 Pro",
1726      "cost_per_1m_in": 1,
1727      "cost_per_1m_out": 3,
1728      "cost_per_1m_in_cached": 0.2,
1729      "cost_per_1m_out_cached": 0,
1730      "context_window": 1000000,
1731      "default_max_tokens": 8000,
1732      "can_reason": true,
1733      "reasoning_levels": [
1734        "low",
1735        "medium",
1736        "high"
1737      ],
1738      "default_reasoning_effort": "medium",
1739      "supports_attachments": false
1740    },
1741    {
1742      "id": "minimax/minimax-m2",
1743      "name": "MiniMax M2",
1744      "cost_per_1m_in": 0.3,
1745      "cost_per_1m_out": 1.2,
1746      "cost_per_1m_in_cached": 0.03,
1747      "cost_per_1m_out_cached": 0.375,
1748      "context_window": 205000,
1749      "default_max_tokens": 8000,
1750      "can_reason": true,
1751      "reasoning_levels": [
1752        "low",
1753        "medium",
1754        "high"
1755      ],
1756      "default_reasoning_effort": "medium",
1757      "supports_attachments": false
1758    },
1759    {
1760      "id": "minimax/minimax-m2.1",
1761      "name": "MiniMax M2.1",
1762      "cost_per_1m_in": 0.3,
1763      "cost_per_1m_out": 1.2,
1764      "cost_per_1m_in_cached": 0.03,
1765      "cost_per_1m_out_cached": 0.375,
1766      "context_window": 204800,
1767      "default_max_tokens": 8000,
1768      "can_reason": true,
1769      "reasoning_levels": [
1770        "low",
1771        "medium",
1772        "high"
1773      ],
1774      "default_reasoning_effort": "medium",
1775      "supports_attachments": false
1776    },
1777    {
1778      "id": "minimax/minimax-m2.1-lightning",
1779      "name": "MiniMax M2.1 Lightning",
1780      "cost_per_1m_in": 0.3,
1781      "cost_per_1m_out": 2.4,
1782      "cost_per_1m_in_cached": 0.03,
1783      "cost_per_1m_out_cached": 0.375,
1784      "context_window": 204800,
1785      "default_max_tokens": 8000,
1786      "can_reason": true,
1787      "reasoning_levels": [
1788        "low",
1789        "medium",
1790        "high"
1791      ],
1792      "default_reasoning_effort": "medium",
1793      "supports_attachments": false
1794    },
1795    {
1796      "id": "minimax/minimax-m2.5",
1797      "name": "MiniMax M2.5",
1798      "cost_per_1m_in": 0.3,
1799      "cost_per_1m_out": 1.2,
1800      "cost_per_1m_in_cached": 0.03,
1801      "cost_per_1m_out_cached": 0.375,
1802      "context_window": 204800,
1803      "default_max_tokens": 8000,
1804      "can_reason": true,
1805      "reasoning_levels": [
1806        "low",
1807        "medium",
1808        "high"
1809      ],
1810      "default_reasoning_effort": "medium",
1811      "supports_attachments": false
1812    },
1813    {
1814      "id": "minimax/minimax-m2.5-highspeed",
1815      "name": "MiniMax M2.5 High Speed",
1816      "cost_per_1m_in": 0.6,
1817      "cost_per_1m_out": 2.4,
1818      "cost_per_1m_in_cached": 0.03,
1819      "cost_per_1m_out_cached": 0.375,
1820      "context_window": 204800,
1821      "default_max_tokens": 8000,
1822      "can_reason": true,
1823      "reasoning_levels": [
1824        "low",
1825        "medium",
1826        "high"
1827      ],
1828      "default_reasoning_effort": "medium",
1829      "supports_attachments": false
1830    },
1831    {
1832      "id": "minimax/minimax-m2.7-highspeed",
1833      "name": "MiniMax M2.7 High Speed",
1834      "cost_per_1m_in": 0.6,
1835      "cost_per_1m_out": 2.4,
1836      "cost_per_1m_in_cached": 0.06,
1837      "cost_per_1m_out_cached": 0.375,
1838      "context_window": 204800,
1839      "default_max_tokens": 8000,
1840      "can_reason": true,
1841      "reasoning_levels": [
1842        "low",
1843        "medium",
1844        "high"
1845      ],
1846      "default_reasoning_effort": "medium",
1847      "supports_attachments": true
1848    },
1849    {
1850      "id": "minimax/minimax-m2.7",
1851      "name": "Minimax M2.7",
1852      "cost_per_1m_in": 0.3,
1853      "cost_per_1m_out": 1.2,
1854      "cost_per_1m_in_cached": 0.06,
1855      "cost_per_1m_out_cached": 0.375,
1856      "context_window": 204800,
1857      "default_max_tokens": 8000,
1858      "can_reason": true,
1859      "reasoning_levels": [
1860        "low",
1861        "medium",
1862        "high"
1863      ],
1864      "default_reasoning_effort": "medium",
1865      "supports_attachments": true
1866    },
1867    {
1868      "id": "mistral/ministral-3b",
1869      "name": "Ministral 3B",
1870      "cost_per_1m_in": 0.1,
1871      "cost_per_1m_out": 0.1,
1872      "cost_per_1m_in_cached": 0,
1873      "cost_per_1m_out_cached": 0,
1874      "context_window": 128000,
1875      "default_max_tokens": 4000,
1876      "can_reason": false,
1877      "supports_attachments": false
1878    },
1879    {
1880      "id": "mistral/ministral-8b",
1881      "name": "Ministral 8B",
1882      "cost_per_1m_in": 0.15,
1883      "cost_per_1m_out": 0.15,
1884      "cost_per_1m_in_cached": 0,
1885      "cost_per_1m_out_cached": 0,
1886      "context_window": 128000,
1887      "default_max_tokens": 4000,
1888      "can_reason": false,
1889      "supports_attachments": false
1890    },
1891    {
1892      "id": "mistral/codestral",
1893      "name": "Mistral Codestral",
1894      "cost_per_1m_in": 0.3,
1895      "cost_per_1m_out": 0.9,
1896      "cost_per_1m_in_cached": 0,
1897      "cost_per_1m_out_cached": 0,
1898      "context_window": 128000,
1899      "default_max_tokens": 4000,
1900      "can_reason": false,
1901      "supports_attachments": false
1902    },
1903    {
1904      "id": "mistral/mistral-medium",
1905      "name": "Mistral Medium 3.1",
1906      "cost_per_1m_in": 0.4,
1907      "cost_per_1m_out": 2,
1908      "cost_per_1m_in_cached": 0,
1909      "cost_per_1m_out_cached": 0,
1910      "context_window": 128000,
1911      "default_max_tokens": 8000,
1912      "can_reason": false,
1913      "supports_attachments": true
1914    },
1915    {
1916      "id": "mistral/mistral-small",
1917      "name": "Mistral Small",
1918      "cost_per_1m_in": 0.1,
1919      "cost_per_1m_out": 0.3,
1920      "cost_per_1m_in_cached": 0,
1921      "cost_per_1m_out_cached": 0,
1922      "context_window": 32000,
1923      "default_max_tokens": 4000,
1924      "can_reason": false,
1925      "supports_attachments": true
1926    },
1927    {
1928      "id": "nvidia/nemotron-nano-12b-v2-vl",
1929      "name": "Nvidia Nemotron Nano 12B V2 VL",
1930      "cost_per_1m_in": 0.2,
1931      "cost_per_1m_out": 0.6,
1932      "cost_per_1m_in_cached": 0,
1933      "cost_per_1m_out_cached": 0,
1934      "context_window": 131072,
1935      "default_max_tokens": 8000,
1936      "can_reason": true,
1937      "reasoning_levels": [
1938        "low",
1939        "medium",
1940        "high"
1941      ],
1942      "default_reasoning_effort": "medium",
1943      "supports_attachments": true
1944    },
1945    {
1946      "id": "nvidia/nemotron-nano-9b-v2",
1947      "name": "Nvidia Nemotron Nano 9B V2",
1948      "cost_per_1m_in": 0.06,
1949      "cost_per_1m_out": 0.23,
1950      "cost_per_1m_in_cached": 0,
1951      "cost_per_1m_out_cached": 0,
1952      "context_window": 131072,
1953      "default_max_tokens": 8000,
1954      "can_reason": true,
1955      "reasoning_levels": [
1956        "low",
1957        "medium",
1958        "high"
1959      ],
1960      "default_reasoning_effort": "medium",
1961      "supports_attachments": false
1962    },
1963    {
1964      "id": "mistral/pixtral-12b",
1965      "name": "Pixtral 12B 2409",
1966      "cost_per_1m_in": 0.15,
1967      "cost_per_1m_out": 0.15,
1968      "cost_per_1m_in_cached": 0,
1969      "cost_per_1m_out_cached": 0,
1970      "context_window": 128000,
1971      "default_max_tokens": 4000,
1972      "can_reason": false,
1973      "supports_attachments": true
1974    },
1975    {
1976      "id": "mistral/pixtral-large",
1977      "name": "Pixtral Large",
1978      "cost_per_1m_in": 2,
1979      "cost_per_1m_out": 6,
1980      "cost_per_1m_in_cached": 0,
1981      "cost_per_1m_out_cached": 0,
1982      "context_window": 128000,
1983      "default_max_tokens": 4000,
1984      "can_reason": false,
1985      "supports_attachments": true
1986    },
1987    {
1988      "id": "alibaba/qwen-3-32b",
1989      "name": "Qwen 3 32B",
1990      "cost_per_1m_in": 0.16,
1991      "cost_per_1m_out": 0.64,
1992      "cost_per_1m_in_cached": 0,
1993      "cost_per_1m_out_cached": 0,
1994      "context_window": 128000,
1995      "default_max_tokens": 8000,
1996      "can_reason": true,
1997      "reasoning_levels": [
1998        "low",
1999        "medium",
2000        "high"
2001      ],
2002      "default_reasoning_effort": "medium",
2003      "supports_attachments": false
2004    },
2005    {
2006      "id": "alibaba/qwen3-coder-30b-a3b",
2007      "name": "Qwen 3 Coder 30B A3B Instruct",
2008      "cost_per_1m_in": 0.15,
2009      "cost_per_1m_out": 0.6,
2010      "cost_per_1m_in_cached": 0,
2011      "cost_per_1m_out_cached": 0,
2012      "context_window": 262144,
2013      "default_max_tokens": 8000,
2014      "can_reason": true,
2015      "reasoning_levels": [
2016        "low",
2017        "medium",
2018        "high"
2019      ],
2020      "default_reasoning_effort": "medium",
2021      "supports_attachments": false
2022    },
2023    {
2024      "id": "alibaba/qwen3-max-thinking",
2025      "name": "Qwen 3 Max Thinking",
2026      "cost_per_1m_in": 1.2,
2027      "cost_per_1m_out": 6,
2028      "cost_per_1m_in_cached": 0.24,
2029      "cost_per_1m_out_cached": 0,
2030      "context_window": 256000,
2031      "default_max_tokens": 8000,
2032      "can_reason": true,
2033      "reasoning_levels": [
2034        "low",
2035        "medium",
2036        "high"
2037      ],
2038      "default_reasoning_effort": "medium",
2039      "supports_attachments": false
2040    },
2041    {
2042      "id": "alibaba/qwen3.5-flash",
2043      "name": "Qwen 3.5 Flash",
2044      "cost_per_1m_in": 0.1,
2045      "cost_per_1m_out": 0.4,
2046      "cost_per_1m_in_cached": 0.001,
2047      "cost_per_1m_out_cached": 0.125,
2048      "context_window": 1000000,
2049      "default_max_tokens": 8000,
2050      "can_reason": true,
2051      "reasoning_levels": [
2052        "low",
2053        "medium",
2054        "high"
2055      ],
2056      "default_reasoning_effort": "medium",
2057      "supports_attachments": true
2058    },
2059    {
2060      "id": "alibaba/qwen3.5-plus",
2061      "name": "Qwen 3.5 Plus",
2062      "cost_per_1m_in": 0.4,
2063      "cost_per_1m_out": 2.4,
2064      "cost_per_1m_in_cached": 0.04,
2065      "cost_per_1m_out_cached": 0.5,
2066      "context_window": 1000000,
2067      "default_max_tokens": 8000,
2068      "can_reason": true,
2069      "reasoning_levels": [
2070        "low",
2071        "medium",
2072        "high"
2073      ],
2074      "default_reasoning_effort": "medium",
2075      "supports_attachments": true
2076    },
2077    {
2078      "id": "alibaba/qwen3-235b-a22b-thinking",
2079      "name": "Qwen3 235B A22B Thinking 2507",
2080      "cost_per_1m_in": 0.23,
2081      "cost_per_1m_out": 2.3,
2082      "cost_per_1m_in_cached": 0.2,
2083      "cost_per_1m_out_cached": 0,
2084      "context_window": 262114,
2085      "default_max_tokens": 8000,
2086      "can_reason": true,
2087      "reasoning_levels": [
2088        "low",
2089        "medium",
2090        "high"
2091      ],
2092      "default_reasoning_effort": "medium",
2093      "supports_attachments": true
2094    },
2095    {
2096      "id": "alibaba/qwen3-coder",
2097      "name": "Qwen3 Coder 480B A35B Instruct",
2098      "cost_per_1m_in": 1.5,
2099      "cost_per_1m_out": 7.5,
2100      "cost_per_1m_in_cached": 0.3,
2101      "cost_per_1m_out_cached": 0,
2102      "context_window": 262144,
2103      "default_max_tokens": 8000,
2104      "can_reason": false,
2105      "supports_attachments": false
2106    },
2107    {
2108      "id": "alibaba/qwen3-coder-next",
2109      "name": "Qwen3 Coder Next",
2110      "cost_per_1m_in": 0.5,
2111      "cost_per_1m_out": 1.2,
2112      "cost_per_1m_in_cached": 0,
2113      "cost_per_1m_out_cached": 0,
2114      "context_window": 256000,
2115      "default_max_tokens": 8000,
2116      "can_reason": false,
2117      "supports_attachments": false
2118    },
2119    {
2120      "id": "alibaba/qwen3-coder-plus",
2121      "name": "Qwen3 Coder Plus",
2122      "cost_per_1m_in": 1,
2123      "cost_per_1m_out": 5,
2124      "cost_per_1m_in_cached": 0.2,
2125      "cost_per_1m_out_cached": 0,
2126      "context_window": 1000000,
2127      "default_max_tokens": 8000,
2128      "can_reason": false,
2129      "supports_attachments": false
2130    },
2131    {
2132      "id": "alibaba/qwen3-max",
2133      "name": "Qwen3 Max",
2134      "cost_per_1m_in": 1.2,
2135      "cost_per_1m_out": 6,
2136      "cost_per_1m_in_cached": 0.24,
2137      "cost_per_1m_out_cached": 0,
2138      "context_window": 262144,
2139      "default_max_tokens": 8000,
2140      "can_reason": false,
2141      "supports_attachments": false
2142    },
2143    {
2144      "id": "alibaba/qwen3-max-preview",
2145      "name": "Qwen3 Max Preview",
2146      "cost_per_1m_in": 1.2,
2147      "cost_per_1m_out": 6,
2148      "cost_per_1m_in_cached": 0.24,
2149      "cost_per_1m_out_cached": 0,
2150      "context_window": 262144,
2151      "default_max_tokens": 8000,
2152      "can_reason": false,
2153      "supports_attachments": false
2154    },
2155    {
2156      "id": "alibaba/qwen3-vl-thinking",
2157      "name": "Qwen3 VL 235B A22B Thinking",
2158      "cost_per_1m_in": 0.22,
2159      "cost_per_1m_out": 0.88,
2160      "cost_per_1m_in_cached": 0,
2161      "cost_per_1m_out_cached": 0,
2162      "context_window": 256000,
2163      "default_max_tokens": 8000,
2164      "can_reason": true,
2165      "reasoning_levels": [
2166        "low",
2167        "medium",
2168        "high"
2169      ],
2170      "default_reasoning_effort": "medium",
2171      "supports_attachments": true
2172    },
2173    {
2174      "id": "alibaba/qwen-3-14b",
2175      "name": "Qwen3-14B",
2176      "cost_per_1m_in": 0.12,
2177      "cost_per_1m_out": 0.24,
2178      "cost_per_1m_in_cached": 0,
2179      "cost_per_1m_out_cached": 0,
2180      "context_window": 40960,
2181      "default_max_tokens": 8000,
2182      "can_reason": true,
2183      "reasoning_levels": [
2184        "low",
2185        "medium",
2186        "high"
2187      ],
2188      "default_reasoning_effort": "medium",
2189      "supports_attachments": false
2190    },
2191    {
2192      "id": "alibaba/qwen-3-235b",
2193      "name": "Qwen3-235B-A22B",
2194      "cost_per_1m_in": 0.22,
2195      "cost_per_1m_out": 0.88,
2196      "cost_per_1m_in_cached": 0.11,
2197      "cost_per_1m_out_cached": 0,
2198      "context_window": 32768,
2199      "default_max_tokens": 8000,
2200      "can_reason": false,
2201      "supports_attachments": false
2202    },
2203    {
2204      "id": "alibaba/qwen-3-30b",
2205      "name": "Qwen3-30B-A3B",
2206      "cost_per_1m_in": 0.08,
2207      "cost_per_1m_out": 0.29,
2208      "cost_per_1m_in_cached": 0,
2209      "cost_per_1m_out_cached": 0,
2210      "context_window": 40960,
2211      "default_max_tokens": 8000,
2212      "can_reason": true,
2213      "reasoning_levels": [
2214        "low",
2215        "medium",
2216        "high"
2217      ],
2218      "default_reasoning_effort": "medium",
2219      "supports_attachments": false
2220    },
2221    {
2222      "id": "bytedance/seed-1.6",
2223      "name": "Seed 1.6",
2224      "cost_per_1m_in": 0.25,
2225      "cost_per_1m_out": 2,
2226      "cost_per_1m_in_cached": 0.05,
2227      "cost_per_1m_out_cached": 0,
2228      "context_window": 256000,
2229      "default_max_tokens": 8000,
2230      "can_reason": true,
2231      "reasoning_levels": [
2232        "low",
2233        "medium",
2234        "high"
2235      ],
2236      "default_reasoning_effort": "medium",
2237      "supports_attachments": false
2238    },
2239    {
2240      "id": "perplexity/sonar",
2241      "name": "Sonar",
2242      "cost_per_1m_in": 0,
2243      "cost_per_1m_out": 0,
2244      "cost_per_1m_in_cached": 0,
2245      "cost_per_1m_out_cached": 0,
2246      "context_window": 127000,
2247      "default_max_tokens": 8000,
2248      "can_reason": false,
2249      "supports_attachments": true
2250    },
2251    {
2252      "id": "perplexity/sonar-pro",
2253      "name": "Sonar Pro",
2254      "cost_per_1m_in": 0,
2255      "cost_per_1m_out": 0,
2256      "cost_per_1m_in_cached": 0,
2257      "cost_per_1m_out_cached": 0,
2258      "context_window": 200000,
2259      "default_max_tokens": 8000,
2260      "can_reason": false,
2261      "supports_attachments": true
2262    },
2263    {
2264      "id": "arcee-ai/trinity-large-preview",
2265      "name": "Trinity Large Preview",
2266      "cost_per_1m_in": 0.25,
2267      "cost_per_1m_out": 1,
2268      "cost_per_1m_in_cached": 0,
2269      "cost_per_1m_out_cached": 0,
2270      "context_window": 131000,
2271      "default_max_tokens": 8000,
2272      "can_reason": false,
2273      "supports_attachments": false
2274    },
2275    {
2276      "id": "openai/gpt-oss-120b",
2277      "name": "gpt-oss-120b",
2278      "cost_per_1m_in": 0.15,
2279      "cost_per_1m_out": 0.6,
2280      "cost_per_1m_in_cached": 0,
2281      "cost_per_1m_out_cached": 0,
2282      "context_window": 128000,
2283      "default_max_tokens": 8000,
2284      "can_reason": true,
2285      "reasoning_levels": [
2286        "low",
2287        "medium",
2288        "high"
2289      ],
2290      "default_reasoning_effort": "medium",
2291      "supports_attachments": false
2292    },
2293    {
2294      "id": "openai/gpt-oss-20b",
2295      "name": "gpt-oss-20b",
2296      "cost_per_1m_in": 0.07,
2297      "cost_per_1m_out": 0.3,
2298      "cost_per_1m_in_cached": 0,
2299      "cost_per_1m_out_cached": 0,
2300      "context_window": 128000,
2301      "default_max_tokens": 8000,
2302      "can_reason": true,
2303      "reasoning_levels": [
2304        "low",
2305        "medium",
2306        "high"
2307      ],
2308      "default_reasoning_effort": "medium",
2309      "supports_attachments": false
2310    },
2311    {
2312      "id": "openai/gpt-oss-safeguard-20b",
2313      "name": "gpt-oss-safeguard-20b",
2314      "cost_per_1m_in": 0.075,
2315      "cost_per_1m_out": 0.3,
2316      "cost_per_1m_in_cached": 0.037,
2317      "cost_per_1m_out_cached": 0,
2318      "context_window": 131072,
2319      "default_max_tokens": 8000,
2320      "can_reason": true,
2321      "reasoning_levels": [
2322        "low",
2323        "medium",
2324        "high"
2325      ],
2326      "default_reasoning_effort": "medium",
2327      "supports_attachments": false
2328    },
2329    {
2330      "id": "openai/o1",
2331      "name": "o1",
2332      "cost_per_1m_in": 15,
2333      "cost_per_1m_out": 60,
2334      "cost_per_1m_in_cached": 7.5,
2335      "cost_per_1m_out_cached": 0,
2336      "context_window": 200000,
2337      "default_max_tokens": 8000,
2338      "can_reason": true,
2339      "reasoning_levels": [
2340        "low",
2341        "medium",
2342        "high"
2343      ],
2344      "default_reasoning_effort": "medium",
2345      "supports_attachments": true
2346    },
2347    {
2348      "id": "openai/o3",
2349      "name": "o3",
2350      "cost_per_1m_in": 2,
2351      "cost_per_1m_out": 8,
2352      "cost_per_1m_in_cached": 0.5,
2353      "cost_per_1m_out_cached": 0,
2354      "context_window": 200000,
2355      "default_max_tokens": 8000,
2356      "can_reason": true,
2357      "reasoning_levels": [
2358        "low",
2359        "medium",
2360        "high"
2361      ],
2362      "default_reasoning_effort": "medium",
2363      "supports_attachments": true
2364    },
2365    {
2366      "id": "openai/o3-pro",
2367      "name": "o3 Pro",
2368      "cost_per_1m_in": 20,
2369      "cost_per_1m_out": 80,
2370      "cost_per_1m_in_cached": 0,
2371      "cost_per_1m_out_cached": 0,
2372      "context_window": 200000,
2373      "default_max_tokens": 8000,
2374      "can_reason": true,
2375      "reasoning_levels": [
2376        "low",
2377        "medium",
2378        "high"
2379      ],
2380      "default_reasoning_effort": "medium",
2381      "supports_attachments": true
2382    },
2383    {
2384      "id": "openai/o3-deep-research",
2385      "name": "o3-deep-research",
2386      "cost_per_1m_in": 10,
2387      "cost_per_1m_out": 40,
2388      "cost_per_1m_in_cached": 2.5,
2389      "cost_per_1m_out_cached": 0,
2390      "context_window": 200000,
2391      "default_max_tokens": 8000,
2392      "can_reason": true,
2393      "reasoning_levels": [
2394        "low",
2395        "medium",
2396        "high"
2397      ],
2398      "default_reasoning_effort": "medium",
2399      "supports_attachments": true
2400    },
2401    {
2402      "id": "openai/o3-mini",
2403      "name": "o3-mini",
2404      "cost_per_1m_in": 1.1,
2405      "cost_per_1m_out": 4.4,
2406      "cost_per_1m_in_cached": 0.55,
2407      "cost_per_1m_out_cached": 0,
2408      "context_window": 200000,
2409      "default_max_tokens": 8000,
2410      "can_reason": true,
2411      "reasoning_levels": [
2412        "low",
2413        "medium",
2414        "high"
2415      ],
2416      "default_reasoning_effort": "medium",
2417      "supports_attachments": false
2418    },
2419    {
2420      "id": "openai/o4-mini",
2421      "name": "o4-mini",
2422      "cost_per_1m_in": 1.1,
2423      "cost_per_1m_out": 4.4,
2424      "cost_per_1m_in_cached": 0.275,
2425      "cost_per_1m_out_cached": 0,
2426      "context_window": 200000,
2427      "default_max_tokens": 8000,
2428      "can_reason": true,
2429      "reasoning_levels": [
2430        "low",
2431        "medium",
2432        "high"
2433      ],
2434      "default_reasoning_effort": "medium",
2435      "supports_attachments": true
2436    }
2437  ],
2438  "default_headers": {
2439    "HTTP-Referer": "https://charm.land",
2440    "X-Title": "Crush"
2441  }
2442}