openrouter.json

   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false
  21    },
  22    {
  23      "id": "allenai/olmo-3.1-32b-instruct",
  24      "name": "AllenAI: Olmo 3.1 32B Instruct",
  25      "cost_per_1m_in": 0.2,
  26      "cost_per_1m_out": 0.6,
  27      "cost_per_1m_in_cached": 0,
  28      "cost_per_1m_out_cached": 0,
  29      "context_window": 65536,
  30      "default_max_tokens": 6553,
  31      "can_reason": false,
  32      "supports_attachments": false
  33    },
  34    {
  35      "id": "amazon/nova-2-lite-v1",
  36      "name": "Amazon: Nova 2 Lite",
  37      "cost_per_1m_in": 0.3,
  38      "cost_per_1m_out": 2.5,
  39      "cost_per_1m_in_cached": 0,
  40      "cost_per_1m_out_cached": 0,
  41      "context_window": 1000000,
  42      "default_max_tokens": 32767,
  43      "can_reason": true,
  44      "reasoning_levels": [
  45        "low",
  46        "medium",
  47        "high"
  48      ],
  49      "default_reasoning_effort": "medium",
  50      "supports_attachments": true
  51    },
  52    {
  53      "id": "amazon/nova-lite-v1",
  54      "name": "Amazon: Nova Lite 1.0",
  55      "cost_per_1m_in": 0.06,
  56      "cost_per_1m_out": 0.24,
  57      "cost_per_1m_in_cached": 0,
  58      "cost_per_1m_out_cached": 0,
  59      "context_window": 300000,
  60      "default_max_tokens": 2560,
  61      "can_reason": false,
  62      "supports_attachments": true
  63    },
  64    {
  65      "id": "amazon/nova-micro-v1",
  66      "name": "Amazon: Nova Micro 1.0",
  67      "cost_per_1m_in": 0.035,
  68      "cost_per_1m_out": 0.14,
  69      "cost_per_1m_in_cached": 0,
  70      "cost_per_1m_out_cached": 0,
  71      "context_window": 128000,
  72      "default_max_tokens": 2560,
  73      "can_reason": false,
  74      "supports_attachments": false
  75    },
  76    {
  77      "id": "amazon/nova-premier-v1",
  78      "name": "Amazon: Nova Premier 1.0",
  79      "cost_per_1m_in": 2.5,
  80      "cost_per_1m_out": 12.5,
  81      "cost_per_1m_in_cached": 0,
  82      "cost_per_1m_out_cached": 0.625,
  83      "context_window": 1000000,
  84      "default_max_tokens": 16000,
  85      "can_reason": false,
  86      "supports_attachments": true
  87    },
  88    {
  89      "id": "amazon/nova-pro-v1",
  90      "name": "Amazon: Nova Pro 1.0",
  91      "cost_per_1m_in": 0.8,
  92      "cost_per_1m_out": 3.2,
  93      "cost_per_1m_in_cached": 0,
  94      "cost_per_1m_out_cached": 0,
  95      "context_window": 300000,
  96      "default_max_tokens": 2560,
  97      "can_reason": false,
  98      "supports_attachments": true
  99    },
 100    {
 101      "id": "anthropic/claude-3-haiku",
 102      "name": "Anthropic: Claude 3 Haiku",
 103      "cost_per_1m_in": 0.25,
 104      "cost_per_1m_out": 1.25,
 105      "cost_per_1m_in_cached": 0.3,
 106      "cost_per_1m_out_cached": 0.03,
 107      "context_window": 200000,
 108      "default_max_tokens": 2048,
 109      "can_reason": false,
 110      "supports_attachments": true
 111    },
 112    {
 113      "id": "anthropic/claude-3.5-haiku",
 114      "name": "Anthropic: Claude 3.5 Haiku",
 115      "cost_per_1m_in": 0.8,
 116      "cost_per_1m_out": 4,
 117      "cost_per_1m_in_cached": 1,
 118      "cost_per_1m_out_cached": 0.08,
 119      "context_window": 200000,
 120      "default_max_tokens": 4096,
 121      "can_reason": false,
 122      "supports_attachments": true
 123    },
 124    {
 125      "id": "anthropic/claude-3.7-sonnet",
 126      "name": "Anthropic: Claude 3.7 Sonnet",
 127      "cost_per_1m_in": 3,
 128      "cost_per_1m_out": 15,
 129      "cost_per_1m_in_cached": 3.75,
 130      "cost_per_1m_out_cached": 0.3,
 131      "context_window": 200000,
 132      "default_max_tokens": 64000,
 133      "can_reason": true,
 134      "reasoning_levels": [
 135        "low",
 136        "medium",
 137        "high"
 138      ],
 139      "default_reasoning_effort": "medium",
 140      "supports_attachments": true
 141    },
 142    {
 143      "id": "anthropic/claude-3.7-sonnet:thinking",
 144      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 145      "cost_per_1m_in": 3,
 146      "cost_per_1m_out": 15,
 147      "cost_per_1m_in_cached": 3.75,
 148      "cost_per_1m_out_cached": 0.3,
 149      "context_window": 200000,
 150      "default_max_tokens": 32000,
 151      "can_reason": true,
 152      "reasoning_levels": [
 153        "low",
 154        "medium",
 155        "high"
 156      ],
 157      "default_reasoning_effort": "medium",
 158      "supports_attachments": true
 159    },
 160    {
 161      "id": "anthropic/claude-haiku-4.5",
 162      "name": "Anthropic: Claude Haiku 4.5",
 163      "cost_per_1m_in": 1,
 164      "cost_per_1m_out": 5,
 165      "cost_per_1m_in_cached": 1.25,
 166      "cost_per_1m_out_cached": 0.1,
 167      "context_window": 200000,
 168      "default_max_tokens": 32000,
 169      "can_reason": true,
 170      "reasoning_levels": [
 171        "low",
 172        "medium",
 173        "high"
 174      ],
 175      "default_reasoning_effort": "medium",
 176      "supports_attachments": true
 177    },
 178    {
 179      "id": "anthropic/claude-opus-4",
 180      "name": "Anthropic: Claude Opus 4",
 181      "cost_per_1m_in": 15,
 182      "cost_per_1m_out": 75,
 183      "cost_per_1m_in_cached": 18.75,
 184      "cost_per_1m_out_cached": 1.5,
 185      "context_window": 200000,
 186      "default_max_tokens": 16000,
 187      "can_reason": true,
 188      "reasoning_levels": [
 189        "low",
 190        "medium",
 191        "high"
 192      ],
 193      "default_reasoning_effort": "medium",
 194      "supports_attachments": true
 195    },
 196    {
 197      "id": "anthropic/claude-opus-4.1",
 198      "name": "Anthropic: Claude Opus 4.1",
 199      "cost_per_1m_in": 15,
 200      "cost_per_1m_out": 75,
 201      "cost_per_1m_in_cached": 18.75,
 202      "cost_per_1m_out_cached": 1.5,
 203      "context_window": 200000,
 204      "default_max_tokens": 16000,
 205      "can_reason": true,
 206      "reasoning_levels": [
 207        "low",
 208        "medium",
 209        "high"
 210      ],
 211      "default_reasoning_effort": "medium",
 212      "supports_attachments": true
 213    },
 214    {
 215      "id": "anthropic/claude-opus-4.5",
 216      "name": "Anthropic: Claude Opus 4.5",
 217      "cost_per_1m_in": 5,
 218      "cost_per_1m_out": 25,
 219      "cost_per_1m_in_cached": 6.25,
 220      "cost_per_1m_out_cached": 0.5,
 221      "context_window": 200000,
 222      "default_max_tokens": 32000,
 223      "can_reason": true,
 224      "reasoning_levels": [
 225        "low",
 226        "medium",
 227        "high"
 228      ],
 229      "default_reasoning_effort": "medium",
 230      "supports_attachments": true
 231    },
 232    {
 233      "id": "anthropic/claude-opus-4.6",
 234      "name": "Anthropic: Claude Opus 4.6",
 235      "cost_per_1m_in": 5,
 236      "cost_per_1m_out": 25,
 237      "cost_per_1m_in_cached": 6.25,
 238      "cost_per_1m_out_cached": 0.5,
 239      "context_window": 1000000,
 240      "default_max_tokens": 64000,
 241      "can_reason": true,
 242      "reasoning_levels": [
 243        "low",
 244        "medium",
 245        "high"
 246      ],
 247      "default_reasoning_effort": "medium",
 248      "supports_attachments": true
 249    },
 250    {
 251      "id": "anthropic/claude-opus-4.6-fast",
 252      "name": "Anthropic: Claude Opus 4.6 (Fast)",
 253      "cost_per_1m_in": 30,
 254      "cost_per_1m_out": 150,
 255      "cost_per_1m_in_cached": 37.5,
 256      "cost_per_1m_out_cached": 3,
 257      "context_window": 1000000,
 258      "default_max_tokens": 64000,
 259      "can_reason": true,
 260      "reasoning_levels": [
 261        "low",
 262        "medium",
 263        "high"
 264      ],
 265      "default_reasoning_effort": "medium",
 266      "supports_attachments": true
 267    },
 268    {
 269      "id": "anthropic/claude-opus-4.7",
 270      "name": "Anthropic: Claude Opus 4.7",
 271      "cost_per_1m_in": 5,
 272      "cost_per_1m_out": 25,
 273      "cost_per_1m_in_cached": 6.25,
 274      "cost_per_1m_out_cached": 0.5,
 275      "context_window": 1000000,
 276      "default_max_tokens": 64000,
 277      "can_reason": true,
 278      "reasoning_levels": [
 279        "low",
 280        "medium",
 281        "high"
 282      ],
 283      "default_reasoning_effort": "medium",
 284      "supports_attachments": true
 285    },
 286    {
 287      "id": "anthropic/claude-sonnet-4",
 288      "name": "Anthropic: Claude Sonnet 4",
 289      "cost_per_1m_in": 3,
 290      "cost_per_1m_out": 15,
 291      "cost_per_1m_in_cached": 3.75,
 292      "cost_per_1m_out_cached": 0.3,
 293      "context_window": 1000000,
 294      "default_max_tokens": 32000,
 295      "can_reason": true,
 296      "reasoning_levels": [
 297        "low",
 298        "medium",
 299        "high"
 300      ],
 301      "default_reasoning_effort": "medium",
 302      "supports_attachments": true
 303    },
 304    {
 305      "id": "anthropic/claude-sonnet-4.5",
 306      "name": "Anthropic: Claude Sonnet 4.5",
 307      "cost_per_1m_in": 3,
 308      "cost_per_1m_out": 15,
 309      "cost_per_1m_in_cached": 3.75,
 310      "cost_per_1m_out_cached": 0.3,
 311      "context_window": 1000000,
 312      "default_max_tokens": 32000,
 313      "can_reason": true,
 314      "reasoning_levels": [
 315        "low",
 316        "medium",
 317        "high"
 318      ],
 319      "default_reasoning_effort": "medium",
 320      "supports_attachments": true
 321    },
 322    {
 323      "id": "anthropic/claude-sonnet-4.6",
 324      "name": "Anthropic: Claude Sonnet 4.6",
 325      "cost_per_1m_in": 3,
 326      "cost_per_1m_out": 15,
 327      "cost_per_1m_in_cached": 3.75,
 328      "cost_per_1m_out_cached": 0.3,
 329      "context_window": 1000000,
 330      "default_max_tokens": 64000,
 331      "can_reason": true,
 332      "reasoning_levels": [
 333        "low",
 334        "medium",
 335        "high"
 336      ],
 337      "default_reasoning_effort": "medium",
 338      "supports_attachments": true
 339    },
 340    {
 341      "id": "arcee-ai/trinity-large-preview:free",
 342      "name": "Arcee AI: Trinity Large Preview (free)",
 343      "cost_per_1m_in": 0,
 344      "cost_per_1m_out": 0,
 345      "cost_per_1m_in_cached": 0,
 346      "cost_per_1m_out_cached": 0,
 347      "context_window": 131000,
 348      "default_max_tokens": 13100,
 349      "can_reason": false,
 350      "supports_attachments": false
 351    },
 352    {
 353      "id": "arcee-ai/trinity-large-thinking",
 354      "name": "Arcee AI: Trinity Large Thinking",
 355      "cost_per_1m_in": 0.22,
 356      "cost_per_1m_out": 0.85,
 357      "cost_per_1m_in_cached": 0,
 358      "cost_per_1m_out_cached": 0.06,
 359      "context_window": 262144,
 360      "default_max_tokens": 131072,
 361      "can_reason": true,
 362      "reasoning_levels": [
 363        "low",
 364        "medium",
 365        "high"
 366      ],
 367      "default_reasoning_effort": "medium",
 368      "supports_attachments": false
 369    },
 370    {
 371      "id": "arcee-ai/trinity-mini",
 372      "name": "Arcee AI: Trinity Mini",
 373      "cost_per_1m_in": 0.045,
 374      "cost_per_1m_out": 0.15,
 375      "cost_per_1m_in_cached": 0,
 376      "cost_per_1m_out_cached": 0,
 377      "context_window": 131072,
 378      "default_max_tokens": 65536,
 379      "can_reason": true,
 380      "reasoning_levels": [
 381        "low",
 382        "medium",
 383        "high"
 384      ],
 385      "default_reasoning_effort": "medium",
 386      "supports_attachments": false
 387    },
 388    {
 389      "id": "arcee-ai/virtuoso-large",
 390      "name": "Arcee AI: Virtuoso Large",
 391      "cost_per_1m_in": 0.75,
 392      "cost_per_1m_out": 1.2,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 131072,
 396      "default_max_tokens": 32000,
 397      "can_reason": false,
 398      "supports_attachments": false
 399    },
 400    {
 401      "id": "baidu/ernie-4.5-21b-a3b",
 402      "name": "Baidu: ERNIE 4.5 21B A3B",
 403      "cost_per_1m_in": 0.07,
 404      "cost_per_1m_out": 0.28,
 405      "cost_per_1m_in_cached": 0,
 406      "cost_per_1m_out_cached": 0,
 407      "context_window": 120000,
 408      "default_max_tokens": 4000,
 409      "can_reason": false,
 410      "supports_attachments": false
 411    },
 412    {
 413      "id": "baidu/ernie-4.5-vl-28b-a3b",
 414      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 415      "cost_per_1m_in": 0.14,
 416      "cost_per_1m_out": 0.56,
 417      "cost_per_1m_in_cached": 0,
 418      "cost_per_1m_out_cached": 0,
 419      "context_window": 30000,
 420      "default_max_tokens": 4000,
 421      "can_reason": true,
 422      "reasoning_levels": [
 423        "low",
 424        "medium",
 425        "high"
 426      ],
 427      "default_reasoning_effort": "medium",
 428      "supports_attachments": true
 429    },
 430    {
 431      "id": "bytedance-seed/seed-1.6",
 432      "name": "ByteDance Seed: Seed 1.6",
 433      "cost_per_1m_in": 0.25,
 434      "cost_per_1m_out": 2,
 435      "cost_per_1m_in_cached": 0,
 436      "cost_per_1m_out_cached": 0,
 437      "context_window": 262144,
 438      "default_max_tokens": 16384,
 439      "can_reason": true,
 440      "reasoning_levels": [
 441        "low",
 442        "medium",
 443        "high"
 444      ],
 445      "default_reasoning_effort": "medium",
 446      "supports_attachments": true
 447    },
 448    {
 449      "id": "bytedance-seed/seed-1.6-flash",
 450      "name": "ByteDance Seed: Seed 1.6 Flash",
 451      "cost_per_1m_in": 0.075,
 452      "cost_per_1m_out": 0.3,
 453      "cost_per_1m_in_cached": 0,
 454      "cost_per_1m_out_cached": 0,
 455      "context_window": 262144,
 456      "default_max_tokens": 16384,
 457      "can_reason": true,
 458      "reasoning_levels": [
 459        "low",
 460        "medium",
 461        "high"
 462      ],
 463      "default_reasoning_effort": "medium",
 464      "supports_attachments": true
 465    },
 466    {
 467      "id": "bytedance-seed/seed-2.0-lite",
 468      "name": "ByteDance Seed: Seed-2.0-Lite",
 469      "cost_per_1m_in": 0.25,
 470      "cost_per_1m_out": 2,
 471      "cost_per_1m_in_cached": 0,
 472      "cost_per_1m_out_cached": 0,
 473      "context_window": 262144,
 474      "default_max_tokens": 65536,
 475      "can_reason": true,
 476      "reasoning_levels": [
 477        "low",
 478        "medium",
 479        "high"
 480      ],
 481      "default_reasoning_effort": "medium",
 482      "supports_attachments": true
 483    },
 484    {
 485      "id": "bytedance-seed/seed-2.0-mini",
 486      "name": "ByteDance Seed: Seed-2.0-Mini",
 487      "cost_per_1m_in": 0.1,
 488      "cost_per_1m_out": 0.4,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 262144,
 492      "default_max_tokens": 65536,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": true
 501    },
 502    {
 503      "id": "cohere/command-r-08-2024",
 504      "name": "Cohere: Command R (08-2024)",
 505      "cost_per_1m_in": 0.15,
 506      "cost_per_1m_out": 0.6,
 507      "cost_per_1m_in_cached": 0,
 508      "cost_per_1m_out_cached": 0,
 509      "context_window": 128000,
 510      "default_max_tokens": 2000,
 511      "can_reason": false,
 512      "supports_attachments": false
 513    },
 514    {
 515      "id": "cohere/command-r-plus-08-2024",
 516      "name": "Cohere: Command R+ (08-2024)",
 517      "cost_per_1m_in": 2.5,
 518      "cost_per_1m_out": 10,
 519      "cost_per_1m_in_cached": 0,
 520      "cost_per_1m_out_cached": 0,
 521      "context_window": 128000,
 522      "default_max_tokens": 2000,
 523      "can_reason": false,
 524      "supports_attachments": false
 525    },
 526    {
 527      "id": "deepseek/deepseek-chat",
 528      "name": "DeepSeek: DeepSeek V3",
 529      "cost_per_1m_in": 0.4,
 530      "cost_per_1m_out": 1.3,
 531      "cost_per_1m_in_cached": 0,
 532      "cost_per_1m_out_cached": 0,
 533      "context_window": 64000,
 534      "default_max_tokens": 8000,
 535      "can_reason": false,
 536      "supports_attachments": false
 537    },
 538    {
 539      "id": "deepseek/deepseek-chat-v3-0324",
 540      "name": "DeepSeek: DeepSeek V3 0324",
 541      "cost_per_1m_in": 0.25,
 542      "cost_per_1m_out": 1,
 543      "cost_per_1m_in_cached": 0,
 544      "cost_per_1m_out_cached": 0,
 545      "context_window": 163840,
 546      "default_max_tokens": 81920,
 547      "can_reason": false,
 548      "supports_attachments": false
 549    },
 550    {
 551      "id": "deepseek/deepseek-chat-v3.1",
 552      "name": "DeepSeek: DeepSeek V3.1",
 553      "cost_per_1m_in": 0.21,
 554      "cost_per_1m_out": 0.79,
 555      "cost_per_1m_in_cached": 0,
 556      "cost_per_1m_out_cached": 0.13,
 557      "context_window": 163840,
 558      "default_max_tokens": 16384,
 559      "can_reason": true,
 560      "reasoning_levels": [
 561        "low",
 562        "medium",
 563        "high"
 564      ],
 565      "default_reasoning_effort": "medium",
 566      "supports_attachments": false
 567    },
 568    {
 569      "id": "deepseek/deepseek-v3.1-terminus",
 570      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 571      "cost_per_1m_in": 0.21,
 572      "cost_per_1m_out": 0.79,
 573      "cost_per_1m_in_cached": 0,
 574      "cost_per_1m_out_cached": 0.13,
 575      "context_window": 163840,
 576      "default_max_tokens": 16384,
 577      "can_reason": true,
 578      "reasoning_levels": [
 579        "low",
 580        "medium",
 581        "high"
 582      ],
 583      "default_reasoning_effort": "medium",
 584      "supports_attachments": false
 585    },
 586    {
 587      "id": "deepseek/deepseek-v3.2",
 588      "name": "DeepSeek: DeepSeek V3.2",
 589      "cost_per_1m_in": 0.26,
 590      "cost_per_1m_out": 0.38,
 591      "cost_per_1m_in_cached": 0,
 592      "cost_per_1m_out_cached": 0.13,
 593      "context_window": 163840,
 594      "default_max_tokens": 16384,
 595      "can_reason": true,
 596      "reasoning_levels": [
 597        "low",
 598        "medium",
 599        "high"
 600      ],
 601      "default_reasoning_effort": "medium",
 602      "supports_attachments": false
 603    },
 604    {
 605      "id": "deepseek/deepseek-v3.2-exp",
 606      "name": "DeepSeek: DeepSeek V3.2 Exp",
 607      "cost_per_1m_in": 0.27,
 608      "cost_per_1m_out": 0.41,
 609      "cost_per_1m_in_cached": 0,
 610      "cost_per_1m_out_cached": 0.27,
 611      "context_window": 163840,
 612      "default_max_tokens": 81920,
 613      "can_reason": true,
 614      "reasoning_levels": [
 615        "low",
 616        "medium",
 617        "high"
 618      ],
 619      "default_reasoning_effort": "medium",
 620      "supports_attachments": false
 621    },
 622    {
 623      "id": "deepseek/deepseek-r1",
 624      "name": "DeepSeek: R1",
 625      "cost_per_1m_in": 0.7,
 626      "cost_per_1m_out": 2.5,
 627      "cost_per_1m_in_cached": 0,
 628      "cost_per_1m_out_cached": 0,
 629      "context_window": 64000,
 630      "default_max_tokens": 8000,
 631      "can_reason": true,
 632      "reasoning_levels": [
 633        "low",
 634        "medium",
 635        "high"
 636      ],
 637      "default_reasoning_effort": "medium",
 638      "supports_attachments": false
 639    },
 640    {
 641      "id": "deepseek/deepseek-r1-0528",
 642      "name": "DeepSeek: R1 0528",
 643      "cost_per_1m_in": 0.55,
 644      "cost_per_1m_out": 2.15,
 645      "cost_per_1m_in_cached": 0,
 646      "cost_per_1m_out_cached": 0.35,
 647      "context_window": 131072,
 648      "default_max_tokens": 65536,
 649      "can_reason": true,
 650      "reasoning_levels": [
 651        "low",
 652        "medium",
 653        "high"
 654      ],
 655      "default_reasoning_effort": "medium",
 656      "supports_attachments": false
 657    },
 658    {
 659      "id": "openrouter/elephant-alpha",
 660      "name": "Elephant",
 661      "cost_per_1m_in": 0,
 662      "cost_per_1m_out": 0,
 663      "cost_per_1m_in_cached": 0,
 664      "cost_per_1m_out_cached": 0,
 665      "context_window": 262144,
 666      "default_max_tokens": 16384,
 667      "can_reason": false,
 668      "supports_attachments": false
 669    },
 670    {
 671      "id": "essentialai/rnj-1-instruct",
 672      "name": "EssentialAI: Rnj 1 Instruct",
 673      "cost_per_1m_in": 0.15,
 674      "cost_per_1m_out": 0.15,
 675      "cost_per_1m_in_cached": 0,
 676      "cost_per_1m_out_cached": 0,
 677      "context_window": 32768,
 678      "default_max_tokens": 3276,
 679      "can_reason": false,
 680      "supports_attachments": false
 681    },
 682    {
 683      "id": "google/gemini-2.0-flash-001",
 684      "name": "Google: Gemini 2.0 Flash",
 685      "cost_per_1m_in": 0.1,
 686      "cost_per_1m_out": 0.4,
 687      "cost_per_1m_in_cached": 0.08333,
 688      "cost_per_1m_out_cached": 0.025,
 689      "context_window": 1048576,
 690      "default_max_tokens": 4096,
 691      "can_reason": false,
 692      "supports_attachments": true
 693    },
 694    {
 695      "id": "google/gemini-2.0-flash-lite-001",
 696      "name": "Google: Gemini 2.0 Flash Lite",
 697      "cost_per_1m_in": 0.075,
 698      "cost_per_1m_out": 0.3,
 699      "cost_per_1m_in_cached": 0,
 700      "cost_per_1m_out_cached": 0,
 701      "context_window": 1048576,
 702      "default_max_tokens": 4096,
 703      "can_reason": false,
 704      "supports_attachments": true
 705    },
 706    {
 707      "id": "google/gemini-2.5-flash",
 708      "name": "Google: Gemini 2.5 Flash",
 709      "cost_per_1m_in": 0.3,
 710      "cost_per_1m_out": 2.5,
 711      "cost_per_1m_in_cached": 0.08333,
 712      "cost_per_1m_out_cached": 0.03,
 713      "context_window": 1048576,
 714      "default_max_tokens": 32767,
 715      "can_reason": true,
 716      "reasoning_levels": [
 717        "low",
 718        "medium",
 719        "high"
 720      ],
 721      "default_reasoning_effort": "medium",
 722      "supports_attachments": true
 723    },
 724    {
 725      "id": "google/gemini-2.5-flash-lite",
 726      "name": "Google: Gemini 2.5 Flash Lite",
 727      "cost_per_1m_in": 0.1,
 728      "cost_per_1m_out": 0.4,
 729      "cost_per_1m_in_cached": 0.08333,
 730      "cost_per_1m_out_cached": 0.01,
 731      "context_window": 1048576,
 732      "default_max_tokens": 32767,
 733      "can_reason": true,
 734      "reasoning_levels": [
 735        "low",
 736        "medium",
 737        "high"
 738      ],
 739      "default_reasoning_effort": "medium",
 740      "supports_attachments": true
 741    },
 742    {
 743      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 744      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 745      "cost_per_1m_in": 0.1,
 746      "cost_per_1m_out": 0.4,
 747      "cost_per_1m_in_cached": 0.08333,
 748      "cost_per_1m_out_cached": 0.01,
 749      "context_window": 1048576,
 750      "default_max_tokens": 32767,
 751      "can_reason": true,
 752      "reasoning_levels": [
 753        "low",
 754        "medium",
 755        "high"
 756      ],
 757      "default_reasoning_effort": "medium",
 758      "supports_attachments": true
 759    },
 760    {
 761      "id": "google/gemini-2.5-pro",
 762      "name": "Google: Gemini 2.5 Pro",
 763      "cost_per_1m_in": 1.25,
 764      "cost_per_1m_out": 10,
 765      "cost_per_1m_in_cached": 0.375,
 766      "cost_per_1m_out_cached": 0.125,
 767      "context_window": 1048576,
 768      "default_max_tokens": 32768,
 769      "can_reason": true,
 770      "reasoning_levels": [
 771        "low",
 772        "medium",
 773        "high"
 774      ],
 775      "default_reasoning_effort": "medium",
 776      "supports_attachments": true
 777    },
 778    {
 779      "id": "google/gemini-2.5-pro-preview-05-06",
 780      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 781      "cost_per_1m_in": 1.25,
 782      "cost_per_1m_out": 10,
 783      "cost_per_1m_in_cached": 0.375,
 784      "cost_per_1m_out_cached": 0.125,
 785      "context_window": 1048576,
 786      "default_max_tokens": 32768,
 787      "can_reason": true,
 788      "reasoning_levels": [
 789        "low",
 790        "medium",
 791        "high"
 792      ],
 793      "default_reasoning_effort": "medium",
 794      "supports_attachments": true
 795    },
 796    {
 797      "id": "google/gemini-2.5-pro-preview",
 798      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 799      "cost_per_1m_in": 1.25,
 800      "cost_per_1m_out": 10,
 801      "cost_per_1m_in_cached": 0.375,
 802      "cost_per_1m_out_cached": 0.125,
 803      "context_window": 1048576,
 804      "default_max_tokens": 32768,
 805      "can_reason": true,
 806      "reasoning_levels": [
 807        "low",
 808        "medium",
 809        "high"
 810      ],
 811      "default_reasoning_effort": "medium",
 812      "supports_attachments": true
 813    },
 814    {
 815      "id": "google/gemini-3-flash-preview",
 816      "name": "Google: Gemini 3 Flash Preview",
 817      "cost_per_1m_in": 0.5,
 818      "cost_per_1m_out": 3,
 819      "cost_per_1m_in_cached": 0.08333,
 820      "cost_per_1m_out_cached": 0.05,
 821      "context_window": 1048576,
 822      "default_max_tokens": 32768,
 823      "can_reason": true,
 824      "reasoning_levels": [
 825        "low",
 826        "medium",
 827        "high"
 828      ],
 829      "default_reasoning_effort": "medium",
 830      "supports_attachments": true
 831    },
 832    {
 833      "id": "google/gemini-3.1-flash-lite-preview",
 834      "name": "Google: Gemini 3.1 Flash Lite Preview",
 835      "cost_per_1m_in": 0.25,
 836      "cost_per_1m_out": 1.5,
 837      "cost_per_1m_in_cached": 0.08333,
 838      "cost_per_1m_out_cached": 0.025,
 839      "context_window": 1048576,
 840      "default_max_tokens": 32768,
 841      "can_reason": true,
 842      "reasoning_levels": [
 843        "low",
 844        "medium",
 845        "high"
 846      ],
 847      "default_reasoning_effort": "medium",
 848      "supports_attachments": true
 849    },
 850    {
 851      "id": "google/gemini-3.1-pro-preview",
 852      "name": "Google: Gemini 3.1 Pro Preview",
 853      "cost_per_1m_in": 2,
 854      "cost_per_1m_out": 12,
 855      "cost_per_1m_in_cached": 0.375,
 856      "cost_per_1m_out_cached": 0.2,
 857      "context_window": 1048576,
 858      "default_max_tokens": 32768,
 859      "can_reason": true,
 860      "reasoning_levels": [
 861        "low",
 862        "medium",
 863        "high"
 864      ],
 865      "default_reasoning_effort": "medium",
 866      "supports_attachments": true
 867    },
 868    {
 869      "id": "google/gemini-3.1-pro-preview-customtools",
 870      "name": "Google: Gemini 3.1 Pro Preview Custom Tools",
 871      "cost_per_1m_in": 2,
 872      "cost_per_1m_out": 12,
 873      "cost_per_1m_in_cached": 0.375,
 874      "cost_per_1m_out_cached": 0.2,
 875      "context_window": 1048576,
 876      "default_max_tokens": 32768,
 877      "can_reason": true,
 878      "reasoning_levels": [
 879        "low",
 880        "medium",
 881        "high"
 882      ],
 883      "default_reasoning_effort": "medium",
 884      "supports_attachments": true
 885    },
 886    {
 887      "id": "google/gemma-4-26b-a4b-it",
  888      "name": "Google: Gemma 4 26B A4B",
 889      "cost_per_1m_in": 0.13,
 890      "cost_per_1m_out": 0.4,
 891      "cost_per_1m_in_cached": 0,
 892      "cost_per_1m_out_cached": 0,
 893      "context_window": 262144,
 894      "default_max_tokens": 65536,
 895      "can_reason": true,
 896      "reasoning_levels": [
 897        "low",
 898        "medium",
 899        "high"
 900      ],
 901      "default_reasoning_effort": "medium",
 902      "supports_attachments": true
 903    },
 904    {
 905      "id": "google/gemma-4-26b-a4b-it:free",
  906      "name": "Google: Gemma 4 26B A4B (free)",
 907      "cost_per_1m_in": 0,
 908      "cost_per_1m_out": 0,
 909      "cost_per_1m_in_cached": 0,
 910      "cost_per_1m_out_cached": 0,
 911      "context_window": 262144,
 912      "default_max_tokens": 16384,
 913      "can_reason": true,
 914      "reasoning_levels": [
 915        "low",
 916        "medium",
 917        "high"
 918      ],
 919      "default_reasoning_effort": "medium",
 920      "supports_attachments": true
 921    },
 922    {
 923      "id": "google/gemma-4-31b-it",
 924      "name": "Google: Gemma 4 31B",
 925      "cost_per_1m_in": 0.2,
 926      "cost_per_1m_out": 0.5,
 927      "cost_per_1m_in_cached": 0,
 928      "cost_per_1m_out_cached": 0,
 929      "context_window": 262144,
 930      "default_max_tokens": 26214,
 931      "can_reason": true,
 932      "reasoning_levels": [
 933        "low",
 934        "medium",
 935        "high"
 936      ],
 937      "default_reasoning_effort": "medium",
 938      "supports_attachments": true
 939    },
 940    {
 941      "id": "google/gemma-4-31b-it:free",
 942      "name": "Google: Gemma 4 31B (free)",
 943      "cost_per_1m_in": 0,
 944      "cost_per_1m_out": 0,
 945      "cost_per_1m_in_cached": 0,
 946      "cost_per_1m_out_cached": 0,
 947      "context_window": 262144,
 948      "default_max_tokens": 16384,
 949      "can_reason": true,
 950      "reasoning_levels": [
 951        "low",
 952        "medium",
 953        "high"
 954      ],
 955      "default_reasoning_effort": "medium",
 956      "supports_attachments": true
 957    },
 958    {
 959      "id": "inception/mercury-2",
 960      "name": "Inception: Mercury 2",
 961      "cost_per_1m_in": 0.25,
 962      "cost_per_1m_out": 0.75,
 963      "cost_per_1m_in_cached": 0,
 964      "cost_per_1m_out_cached": 0.025,
 965      "context_window": 128000,
 966      "default_max_tokens": 25000,
 967      "can_reason": true,
 968      "reasoning_levels": [
 969        "low",
 970        "medium",
 971        "high"
 972      ],
 973      "default_reasoning_effort": "medium",
 974      "supports_attachments": false
 975    },
 976    {
 977      "id": "kwaipilot/kat-coder-pro-v2",
 978      "name": "Kwaipilot: KAT-Coder-Pro V2",
 979      "cost_per_1m_in": 0.3,
 980      "cost_per_1m_out": 1.2,
 981      "cost_per_1m_in_cached": 0,
 982      "cost_per_1m_out_cached": 0.06,
 983      "context_window": 262144,
 984      "default_max_tokens": 72000,
 985      "can_reason": false,
 986      "supports_attachments": false
 987    },
 988    {
 989      "id": "meta-llama/llama-3.1-70b-instruct",
 990      "name": "Meta: Llama 3.1 70B Instruct",
 991      "cost_per_1m_in": 0.4,
 992      "cost_per_1m_out": 0.4,
 993      "cost_per_1m_in_cached": 0,
 994      "cost_per_1m_out_cached": 0,
 995      "context_window": 131072,
 996      "default_max_tokens": 8192,
 997      "can_reason": false,
 998      "supports_attachments": false
 999    },
1000    {
1001      "id": "meta-llama/llama-3.3-70b-instruct",
1002      "name": "Meta: Llama 3.3 70B Instruct",
1003      "cost_per_1m_in": 0.13,
1004      "cost_per_1m_out": 0.4,
1005      "cost_per_1m_in_cached": 0,
1006      "cost_per_1m_out_cached": 0,
1007      "context_window": 131072,
1008      "default_max_tokens": 64000,
1009      "can_reason": false,
1010      "supports_attachments": false
1011    },
1012    {
1013      "id": "meta-llama/llama-3.3-70b-instruct:free",
1014      "name": "Meta: Llama 3.3 70B Instruct (free)",
1015      "cost_per_1m_in": 0,
1016      "cost_per_1m_out": 0,
1017      "cost_per_1m_in_cached": 0,
1018      "cost_per_1m_out_cached": 0,
1019      "context_window": 65536,
1020      "default_max_tokens": 6553,
1021      "can_reason": false,
1022      "supports_attachments": false
1023    },
1024    {
1025      "id": "meta-llama/llama-4-scout",
1026      "name": "Meta: Llama 4 Scout",
1027      "cost_per_1m_in": 0.25,
1028      "cost_per_1m_out": 0.7,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0,
1031      "context_window": 1310720,
1032      "default_max_tokens": 4096,
1033      "can_reason": false,
1034      "supports_attachments": true
1035    },
1036    {
1037      "id": "minimax/minimax-m2",
1038      "name": "MiniMax: MiniMax M2",
1039      "cost_per_1m_in": 0.3,
1040      "cost_per_1m_out": 1.2,
1041      "cost_per_1m_in_cached": 0,
1042      "cost_per_1m_out_cached": 0,
1043      "context_window": 196608,
1044      "default_max_tokens": 98304,
1045      "can_reason": true,
1046      "reasoning_levels": [
1047        "low",
1048        "medium",
1049        "high"
1050      ],
1051      "default_reasoning_effort": "medium",
1052      "supports_attachments": false
1053    },
1054    {
1055      "id": "minimax/minimax-m2.1",
1056      "name": "MiniMax: MiniMax M2.1",
1057      "cost_per_1m_in": 0.3,
1058      "cost_per_1m_out": 1.2,
1059      "cost_per_1m_in_cached": 0,
1060      "cost_per_1m_out_cached": 0.03,
1061      "context_window": 204800,
1062      "default_max_tokens": 65536,
1063      "can_reason": true,
1064      "reasoning_levels": [
1065        "low",
1066        "medium",
1067        "high"
1068      ],
1069      "default_reasoning_effort": "medium",
1070      "supports_attachments": false
1071    },
1072    {
1073      "id": "minimax/minimax-m2.5",
1074      "name": "MiniMax: MiniMax M2.5",
1075      "cost_per_1m_in": 0.3,
1076      "cost_per_1m_out": 1.2,
1077      "cost_per_1m_in_cached": 0,
1078      "cost_per_1m_out_cached": 0.03,
1079      "context_window": 204800,
1080      "default_max_tokens": 65550,
1081      "can_reason": true,
1082      "reasoning_levels": [
1083        "low",
1084        "medium",
1085        "high"
1086      ],
1087      "default_reasoning_effort": "medium",
1088      "supports_attachments": false
1089    },
1090    {
1091      "id": "minimax/minimax-m2.5:free",
1092      "name": "MiniMax: MiniMax M2.5 (free)",
1093      "cost_per_1m_in": 0,
1094      "cost_per_1m_out": 0,
1095      "cost_per_1m_in_cached": 0,
1096      "cost_per_1m_out_cached": 0,
1097      "context_window": 196608,
1098      "default_max_tokens": 4096,
1099      "can_reason": true,
1100      "reasoning_levels": [
1101        "low",
1102        "medium",
1103        "high"
1104      ],
1105      "default_reasoning_effort": "medium",
1106      "supports_attachments": false
1107    },
1108    {
1109      "id": "minimax/minimax-m2.7",
1110      "name": "MiniMax: MiniMax M2.7",
1111      "cost_per_1m_in": 0.3,
1112      "cost_per_1m_out": 1.2,
1113      "cost_per_1m_in_cached": 0,
1114      "cost_per_1m_out_cached": 0.06,
1115      "context_window": 204800,
1116      "default_max_tokens": 65536,
1117      "can_reason": true,
1118      "reasoning_levels": [
1119        "low",
1120        "medium",
1121        "high"
1122      ],
1123      "default_reasoning_effort": "medium",
1124      "supports_attachments": false
1125    },
1126    {
1127      "id": "mistralai/mistral-large",
1128      "name": "Mistral Large",
1129      "cost_per_1m_in": 2,
1130      "cost_per_1m_out": 6,
1131      "cost_per_1m_in_cached": 0,
1132      "cost_per_1m_out_cached": 0.2,
1133      "context_window": 128000,
1134      "default_max_tokens": 12800,
1135      "can_reason": false,
1136      "supports_attachments": false
1137    },
1138    {
1139      "id": "mistralai/mistral-large-2407",
1140      "name": "Mistral Large 2407",
1141      "cost_per_1m_in": 2,
1142      "cost_per_1m_out": 6,
1143      "cost_per_1m_in_cached": 0,
1144      "cost_per_1m_out_cached": 0.2,
1145      "context_window": 131072,
1146      "default_max_tokens": 13107,
1147      "can_reason": false,
1148      "supports_attachments": false
1149    },
1150    {
1151      "id": "mistralai/mistral-large-2411",
1152      "name": "Mistral Large 2411",
1153      "cost_per_1m_in": 2,
1154      "cost_per_1m_out": 6,
1155      "cost_per_1m_in_cached": 0,
1156      "cost_per_1m_out_cached": 0.2,
1157      "context_window": 131072,
1158      "default_max_tokens": 13107,
1159      "can_reason": false,
1160      "supports_attachments": false
1161    },
1162    {
1163      "id": "mistralai/codestral-2508",
1164      "name": "Mistral: Codestral 2508",
1165      "cost_per_1m_in": 0.3,
1166      "cost_per_1m_out": 0.9,
1167      "cost_per_1m_in_cached": 0,
1168      "cost_per_1m_out_cached": 0.03,
1169      "context_window": 256000,
1170      "default_max_tokens": 25600,
1171      "can_reason": false,
1172      "supports_attachments": false
1173    },
1174    {
1175      "id": "mistralai/devstral-2512",
1176      "name": "Mistral: Devstral 2 2512",
1177      "cost_per_1m_in": 0.4,
1178      "cost_per_1m_out": 2,
1179      "cost_per_1m_in_cached": 0,
1180      "cost_per_1m_out_cached": 0.04,
1181      "context_window": 262144,
1182      "default_max_tokens": 26214,
1183      "can_reason": false,
1184      "supports_attachments": false
1185    },
1186    {
1187      "id": "mistralai/devstral-medium",
1188      "name": "Mistral: Devstral Medium",
1189      "cost_per_1m_in": 0.4,
1190      "cost_per_1m_out": 2,
1191      "cost_per_1m_in_cached": 0,
1192      "cost_per_1m_out_cached": 0.04,
1193      "context_window": 131072,
1194      "default_max_tokens": 13107,
1195      "can_reason": false,
1196      "supports_attachments": false
1197    },
1198    {
1199      "id": "mistralai/devstral-small",
1200      "name": "Mistral: Devstral Small 1.1",
1201      "cost_per_1m_in": 0.1,
1202      "cost_per_1m_out": 0.3,
1203      "cost_per_1m_in_cached": 0,
1204      "cost_per_1m_out_cached": 0.01,
1205      "context_window": 131072,
1206      "default_max_tokens": 13107,
1207      "can_reason": false,
1208      "supports_attachments": false
1209    },
1210    {
1211      "id": "mistralai/ministral-14b-2512",
1212      "name": "Mistral: Ministral 3 14B 2512",
1213      "cost_per_1m_in": 0.2,
1214      "cost_per_1m_out": 0.2,
1215      "cost_per_1m_in_cached": 0,
1216      "cost_per_1m_out_cached": 0.02,
1217      "context_window": 262144,
1218      "default_max_tokens": 26214,
1219      "can_reason": false,
1220      "supports_attachments": true
1221    },
1222    {
1223      "id": "mistralai/ministral-3b-2512",
1224      "name": "Mistral: Ministral 3 3B 2512",
1225      "cost_per_1m_in": 0.1,
1226      "cost_per_1m_out": 0.1,
1227      "cost_per_1m_in_cached": 0,
1228      "cost_per_1m_out_cached": 0.01,
1229      "context_window": 131072,
1230      "default_max_tokens": 13107,
1231      "can_reason": false,
1232      "supports_attachments": true
1233    },
1234    {
1235      "id": "mistralai/ministral-8b-2512",
1236      "name": "Mistral: Ministral 3 8B 2512",
1237      "cost_per_1m_in": 0.3,
1238      "cost_per_1m_out": 0.3,
1239      "cost_per_1m_in_cached": 0,
1240      "cost_per_1m_out_cached": 0,
1241      "context_window": 262144,
1242      "default_max_tokens": 131072,
1243      "can_reason": false,
1244      "supports_attachments": true
1245    },
1246    {
1247      "id": "mistralai/mistral-large-2512",
1248      "name": "Mistral: Mistral Large 3 2512",
1249      "cost_per_1m_in": 0.5,
1250      "cost_per_1m_out": 1.5,
1251      "cost_per_1m_in_cached": 0,
1252      "cost_per_1m_out_cached": 0.05,
1253      "context_window": 262144,
1254      "default_max_tokens": 26214,
1255      "can_reason": false,
1256      "supports_attachments": true
1257    },
1258    {
1259      "id": "mistralai/mistral-medium-3",
1260      "name": "Mistral: Mistral Medium 3",
1261      "cost_per_1m_in": 0.4,
1262      "cost_per_1m_out": 2,
1263      "cost_per_1m_in_cached": 0,
1264      "cost_per_1m_out_cached": 0.04,
1265      "context_window": 131072,
1266      "default_max_tokens": 13107,
1267      "can_reason": false,
1268      "supports_attachments": true
1269    },
1270    {
1271      "id": "mistralai/mistral-medium-3.1",
1272      "name": "Mistral: Mistral Medium 3.1",
1273      "cost_per_1m_in": 0.4,
1274      "cost_per_1m_out": 2,
1275      "cost_per_1m_in_cached": 0,
1276      "cost_per_1m_out_cached": 0.04,
1277      "context_window": 131072,
1278      "default_max_tokens": 13107,
1279      "can_reason": false,
1280      "supports_attachments": true
1281    },
1282    {
1283      "id": "mistralai/mistral-nemo",
1284      "name": "Mistral: Mistral Nemo",
1285      "cost_per_1m_in": 0.15,
1286      "cost_per_1m_out": 0.15,
1287      "cost_per_1m_in_cached": 0,
1288      "cost_per_1m_out_cached": 0.015,
1289      "context_window": 131072,
1290      "default_max_tokens": 13107,
1291      "can_reason": false,
1292      "supports_attachments": false
1293    },
1294    {
1295      "id": "mistralai/mistral-small-3.2-24b-instruct",
1296      "name": "Mistral: Mistral Small 3.2 24B",
1297      "cost_per_1m_in": 0.09375,
1298      "cost_per_1m_out": 0.25,
1299      "cost_per_1m_in_cached": 0,
1300      "cost_per_1m_out_cached": 0,
1301      "context_window": 256000,
1302      "default_max_tokens": 8192,
1303      "can_reason": false,
1304      "supports_attachments": true
1305    },
1306    {
1307      "id": "mistralai/mistral-small-2603",
1308      "name": "Mistral: Mistral Small 4",
1309      "cost_per_1m_in": 0.15,
1310      "cost_per_1m_out": 0.6,
1311      "cost_per_1m_in_cached": 0,
1312      "cost_per_1m_out_cached": 0.015,
1313      "context_window": 262144,
1314      "default_max_tokens": 26214,
1315      "can_reason": true,
1316      "reasoning_levels": [
1317        "low",
1318        "medium",
1319        "high"
1320      ],
1321      "default_reasoning_effort": "medium",
1322      "supports_attachments": true
1323    },
1324    {
1325      "id": "mistralai/mistral-small-creative",
1326      "name": "Mistral: Mistral Small Creative",
1327      "cost_per_1m_in": 0.1,
1328      "cost_per_1m_out": 0.3,
1329      "cost_per_1m_in_cached": 0,
1330      "cost_per_1m_out_cached": 0.01,
1331      "context_window": 32768,
1332      "default_max_tokens": 3276,
1333      "can_reason": false,
1334      "supports_attachments": false
1335    },
1336    {
1337      "id": "mistralai/mixtral-8x22b-instruct",
1338      "name": "Mistral: Mixtral 8x22B Instruct",
1339      "cost_per_1m_in": 2,
1340      "cost_per_1m_out": 6,
1341      "cost_per_1m_in_cached": 0,
1342      "cost_per_1m_out_cached": 0.2,
1343      "context_window": 65536,
1344      "default_max_tokens": 6553,
1345      "can_reason": false,
1346      "supports_attachments": false
1347    },
1348    {
1349      "id": "mistralai/mixtral-8x7b-instruct",
1350      "name": "Mistral: Mixtral 8x7B Instruct",
1351      "cost_per_1m_in": 0.54,
1352      "cost_per_1m_out": 0.54,
1353      "cost_per_1m_in_cached": 0,
1354      "cost_per_1m_out_cached": 0,
1355      "context_window": 32768,
1356      "default_max_tokens": 8192,
1357      "can_reason": false,
1358      "supports_attachments": false
1359    },
1360    {
1361      "id": "mistralai/pixtral-large-2411",
1362      "name": "Mistral: Pixtral Large 2411",
1363      "cost_per_1m_in": 2,
1364      "cost_per_1m_out": 6,
1365      "cost_per_1m_in_cached": 0,
1366      "cost_per_1m_out_cached": 0.2,
1367      "context_window": 131072,
1368      "default_max_tokens": 13107,
1369      "can_reason": false,
1370      "supports_attachments": true
1371    },
1372    {
1373      "id": "mistralai/mistral-saba",
1374      "name": "Mistral: Saba",
1375      "cost_per_1m_in": 0.2,
1376      "cost_per_1m_out": 0.6,
1377      "cost_per_1m_in_cached": 0,
1378      "cost_per_1m_out_cached": 0.02,
1379      "context_window": 32768,
1380      "default_max_tokens": 3276,
1381      "can_reason": false,
1382      "supports_attachments": false
1383    },
1384    {
1385      "id": "mistralai/voxtral-small-24b-2507",
1386      "name": "Mistral: Voxtral Small 24B 2507",
1387      "cost_per_1m_in": 0.1,
1388      "cost_per_1m_out": 0.3,
1389      "cost_per_1m_in_cached": 0,
1390      "cost_per_1m_out_cached": 0.01,
1391      "context_window": 32000,
1392      "default_max_tokens": 3200,
1393      "can_reason": false,
1394      "supports_attachments": false
1395    },
1396    {
1397      "id": "moonshotai/kimi-k2",
1398      "name": "MoonshotAI: Kimi K2 0711",
1399      "cost_per_1m_in": 0.57,
1400      "cost_per_1m_out": 2.3,
1401      "cost_per_1m_in_cached": 0,
1402      "cost_per_1m_out_cached": 0,
1403      "context_window": 131072,
1404      "default_max_tokens": 16384,
1405      "can_reason": false,
1406      "supports_attachments": false
1407    },
1408    {
1409      "id": "moonshotai/kimi-k2-0905",
1410      "name": "MoonshotAI: Kimi K2 0905",
1411      "cost_per_1m_in": 0.6,
1412      "cost_per_1m_out": 2.5,
1413      "cost_per_1m_in_cached": 0,
1414      "cost_per_1m_out_cached": 0,
1415      "context_window": 262144,
1416      "default_max_tokens": 131072,
1417      "can_reason": false,
1418      "supports_attachments": false
1419    },
1420    {
1421      "id": "moonshotai/kimi-k2-thinking",
1422      "name": "MoonshotAI: Kimi K2 Thinking",
1423      "cost_per_1m_in": 0.6,
1424      "cost_per_1m_out": 2.5,
1425      "cost_per_1m_in_cached": 0,
1426      "cost_per_1m_out_cached": 0.6,
1427      "context_window": 262144,
1428      "default_max_tokens": 131072,
1429      "can_reason": true,
1430      "reasoning_levels": [
1431        "low",
1432        "medium",
1433        "high"
1434      ],
1435      "default_reasoning_effort": "medium",
1436      "supports_attachments": false
1437    },
1438    {
1439      "id": "moonshotai/kimi-k2.5",
1440      "name": "MoonshotAI: Kimi K2.5",
1441      "cost_per_1m_in": 0.445,
1442      "cost_per_1m_out": 2,
1443      "cost_per_1m_in_cached": 0,
1444      "cost_per_1m_out_cached": 0.225,
1445      "context_window": 262144,
1446      "default_max_tokens": 131072,
1447      "can_reason": true,
1448      "reasoning_levels": [
1449        "low",
1450        "medium",
1451        "high"
1452      ],
1453      "default_reasoning_effort": "medium",
1454      "supports_attachments": true
1455    },
1456    {
1457      "id": "moonshotai/kimi-k2.6",
1458      "name": "MoonshotAI: Kimi K2.6",
1459      "cost_per_1m_in": 0.95,
1460      "cost_per_1m_out": 4,
1461      "cost_per_1m_in_cached": 0,
1462      "cost_per_1m_out_cached": 0.16,
1463      "context_window": 262144,
1464      "default_max_tokens": 26214,
1465      "can_reason": true,
1466      "reasoning_levels": [
1467        "low",
1468        "medium",
1469        "high"
1470      ],
1471      "default_reasoning_effort": "medium",
1472      "supports_attachments": true
1473    },
1474    {
1475      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1476      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1477      "cost_per_1m_in": 1.2,
1478      "cost_per_1m_out": 1.2,
1479      "cost_per_1m_in_cached": 0,
1480      "cost_per_1m_out_cached": 0,
1481      "context_window": 131072,
1482      "default_max_tokens": 8192,
1483      "can_reason": false,
1484      "supports_attachments": false
1485    },
1486    {
1487      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1488      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1489      "cost_per_1m_in": 0.1,
1490      "cost_per_1m_out": 0.4,
1491      "cost_per_1m_in_cached": 0,
1492      "cost_per_1m_out_cached": 0,
1493      "context_window": 131072,
1494      "default_max_tokens": 13107,
1495      "can_reason": true,
1496      "reasoning_levels": [
1497        "low",
1498        "medium",
1499        "high"
1500      ],
1501      "default_reasoning_effort": "medium",
1502      "supports_attachments": false
1503    },
1504    {
1505      "id": "nvidia/nemotron-3-nano-30b-a3b",
1506      "name": "NVIDIA: Nemotron 3 Nano 30B A3B",
1507      "cost_per_1m_in": 0.05,
1508      "cost_per_1m_out": 0.2,
1509      "cost_per_1m_in_cached": 0,
1510      "cost_per_1m_out_cached": 0,
1511      "context_window": 262144,
1512      "default_max_tokens": 26214,
1513      "can_reason": true,
1514      "reasoning_levels": [
1515        "low",
1516        "medium",
1517        "high"
1518      ],
1519      "default_reasoning_effort": "medium",
1520      "supports_attachments": false
1521    },
1522    {
1523      "id": "nvidia/nemotron-3-nano-30b-a3b:free",
1524      "name": "NVIDIA: Nemotron 3 Nano 30B A3B (free)",
1525      "cost_per_1m_in": 0,
1526      "cost_per_1m_out": 0,
1527      "cost_per_1m_in_cached": 0,
1528      "cost_per_1m_out_cached": 0,
1529      "context_window": 256000,
1530      "default_max_tokens": 25600,
1531      "can_reason": true,
1532      "reasoning_levels": [
1533        "low",
1534        "medium",
1535        "high"
1536      ],
1537      "default_reasoning_effort": "medium",
1538      "supports_attachments": false
1539    },
1540    {
1541      "id": "nvidia/nemotron-3-super-120b-a12b",
1542      "name": "NVIDIA: Nemotron 3 Super",
1543      "cost_per_1m_in": 0.1,
1544      "cost_per_1m_out": 0.5,
1545      "cost_per_1m_in_cached": 0,
1546      "cost_per_1m_out_cached": 0.1,
1547      "context_window": 262144,
1548      "default_max_tokens": 26214,
1549      "can_reason": true,
1550      "reasoning_levels": [
1551        "low",
1552        "medium",
1553        "high"
1554      ],
1555      "default_reasoning_effort": "medium",
1556      "supports_attachments": false
1557    },
1558    {
1559      "id": "nvidia/nemotron-3-super-120b-a12b:free",
1560      "name": "NVIDIA: Nemotron 3 Super (free)",
1561      "cost_per_1m_in": 0,
1562      "cost_per_1m_out": 0,
1563      "cost_per_1m_in_cached": 0,
1564      "cost_per_1m_out_cached": 0,
1565      "context_window": 262144,
1566      "default_max_tokens": 131072,
1567      "can_reason": true,
1568      "reasoning_levels": [
1569        "low",
1570        "medium",
1571        "high"
1572      ],
1573      "default_reasoning_effort": "medium",
1574      "supports_attachments": false
1575    },
1576    {
1577      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1578      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1579      "cost_per_1m_in": 0,
1580      "cost_per_1m_out": 0,
1581      "cost_per_1m_in_cached": 0,
1582      "cost_per_1m_out_cached": 0,
1583      "context_window": 128000,
1584      "default_max_tokens": 64000,
1585      "can_reason": true,
1586      "reasoning_levels": [
1587        "low",
1588        "medium",
1589        "high"
1590      ],
1591      "default_reasoning_effort": "medium",
1592      "supports_attachments": true
1593    },
1594    {
1595      "id": "nvidia/nemotron-nano-9b-v2",
1596      "name": "NVIDIA: Nemotron Nano 9B V2",
1597      "cost_per_1m_in": 0.04,
1598      "cost_per_1m_out": 0.16,
1599      "cost_per_1m_in_cached": 0,
1600      "cost_per_1m_out_cached": 0,
1601      "context_window": 131072,
1602      "default_max_tokens": 13107,
1603      "can_reason": true,
1604      "reasoning_levels": [
1605        "low",
1606        "medium",
1607        "high"
1608      ],
1609      "default_reasoning_effort": "medium",
1610      "supports_attachments": false
1611    },
1612    {
1613      "id": "nvidia/nemotron-nano-9b-v2:free",
1614      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1615      "cost_per_1m_in": 0,
1616      "cost_per_1m_out": 0,
1617      "cost_per_1m_in_cached": 0,
1618      "cost_per_1m_out_cached": 0,
1619      "context_window": 128000,
1620      "default_max_tokens": 12800,
1621      "can_reason": true,
1622      "reasoning_levels": [
1623        "low",
1624        "medium",
1625        "high"
1626      ],
1627      "default_reasoning_effort": "medium",
1628      "supports_attachments": false
1629    },
1630    {
1631      "id": "nex-agi/deepseek-v3.1-nex-n1",
1632      "name": "Nex AGI: DeepSeek V3.1 Nex N1",
1633      "cost_per_1m_in": 0.135,
1634      "cost_per_1m_out": 0.5,
1635      "cost_per_1m_in_cached": 0,
1636      "cost_per_1m_out_cached": 0,
1637      "context_window": 131072,
1638      "default_max_tokens": 81920,
1639      "can_reason": false,
1640      "supports_attachments": false
1641    },
1642    {
1643      "id": "openai/gpt-audio",
1644      "name": "OpenAI: GPT Audio",
1645      "cost_per_1m_in": 2.5,
1646      "cost_per_1m_out": 10,
1647      "cost_per_1m_in_cached": 0,
1648      "cost_per_1m_out_cached": 0,
1649      "context_window": 128000,
1650      "default_max_tokens": 8192,
1651      "can_reason": false,
1652      "supports_attachments": false
1653    },
1654    {
1655      "id": "openai/gpt-audio-mini",
1656      "name": "OpenAI: GPT Audio Mini",
1657      "cost_per_1m_in": 0.6,
1658      "cost_per_1m_out": 2.4,
1659      "cost_per_1m_in_cached": 0,
1660      "cost_per_1m_out_cached": 0,
1661      "context_window": 128000,
1662      "default_max_tokens": 8192,
1663      "can_reason": false,
1664      "supports_attachments": false
1665    },
1666    {
1667      "id": "openai/gpt-4-turbo",
1668      "name": "OpenAI: GPT-4 Turbo",
1669      "cost_per_1m_in": 10,
1670      "cost_per_1m_out": 30,
1671      "cost_per_1m_in_cached": 0,
1672      "cost_per_1m_out_cached": 0,
1673      "context_window": 128000,
1674      "default_max_tokens": 2048,
1675      "can_reason": false,
1676      "supports_attachments": true
1677    },
1678    {
1679      "id": "openai/gpt-4-1106-preview",
1680      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1681      "cost_per_1m_in": 10,
1682      "cost_per_1m_out": 30,
1683      "cost_per_1m_in_cached": 0,
1684      "cost_per_1m_out_cached": 0,
1685      "context_window": 128000,
1686      "default_max_tokens": 2048,
1687      "can_reason": false,
1688      "supports_attachments": false
1689    },
1690    {
1691      "id": "openai/gpt-4-turbo-preview",
1692      "name": "OpenAI: GPT-4 Turbo Preview",
1693      "cost_per_1m_in": 10,
1694      "cost_per_1m_out": 30,
1695      "cost_per_1m_in_cached": 0,
1696      "cost_per_1m_out_cached": 0,
1697      "context_window": 128000,
1698      "default_max_tokens": 2048,
1699      "can_reason": false,
1700      "supports_attachments": false
1701    },
1702    {
1703      "id": "openai/gpt-4.1",
1704      "name": "OpenAI: GPT-4.1",
1705      "cost_per_1m_in": 2,
1706      "cost_per_1m_out": 8,
1707      "cost_per_1m_in_cached": 0,
1708      "cost_per_1m_out_cached": 0.5,
1709      "context_window": 1047576,
1710      "default_max_tokens": 104757,
1711      "can_reason": false,
1712      "supports_attachments": true
1713    },
1714    {
1715      "id": "openai/gpt-4.1-mini",
1716      "name": "OpenAI: GPT-4.1 Mini",
1717      "cost_per_1m_in": 0.4,
1718      "cost_per_1m_out": 1.6,
1719      "cost_per_1m_in_cached": 0,
1720      "cost_per_1m_out_cached": 0.1,
1721      "context_window": 1047576,
1722      "default_max_tokens": 104757,
1723      "can_reason": false,
1724      "supports_attachments": true
1725    },
1726    {
1727      "id": "openai/gpt-4.1-nano",
1728      "name": "OpenAI: GPT-4.1 Nano",
1729      "cost_per_1m_in": 0.1,
1730      "cost_per_1m_out": 0.4,
1731      "cost_per_1m_in_cached": 0,
1732      "cost_per_1m_out_cached": 0.03,
1733      "context_window": 1047576,
1734      "default_max_tokens": 104757,
1735      "can_reason": false,
1736      "supports_attachments": true
1737    },
1738    {
1739      "id": "openai/gpt-4o",
1740      "name": "OpenAI: GPT-4o",
1741      "cost_per_1m_in": 2.5,
1742      "cost_per_1m_out": 10,
1743      "cost_per_1m_in_cached": 0,
1744      "cost_per_1m_out_cached": 0,
1745      "context_window": 128000,
1746      "default_max_tokens": 8192,
1747      "can_reason": false,
1748      "supports_attachments": true
1749    },
1750    {
1751      "id": "openai/gpt-4o-2024-05-13",
1752      "name": "OpenAI: GPT-4o (2024-05-13)",
1753      "cost_per_1m_in": 5,
1754      "cost_per_1m_out": 15,
1755      "cost_per_1m_in_cached": 0,
1756      "cost_per_1m_out_cached": 0,
1757      "context_window": 128000,
1758      "default_max_tokens": 2048,
1759      "can_reason": false,
1760      "supports_attachments": true
1761    },
1762    {
1763      "id": "openai/gpt-4o-2024-08-06",
1764      "name": "OpenAI: GPT-4o (2024-08-06)",
1765      "cost_per_1m_in": 2.5,
1766      "cost_per_1m_out": 10,
1767      "cost_per_1m_in_cached": 0,
1768      "cost_per_1m_out_cached": 1.25,
1769      "context_window": 128000,
1770      "default_max_tokens": 8192,
1771      "can_reason": false,
1772      "supports_attachments": true
1773    },
1774    {
1775      "id": "openai/gpt-4o-2024-11-20",
1776      "name": "OpenAI: GPT-4o (2024-11-20)",
1777      "cost_per_1m_in": 2.5,
1778      "cost_per_1m_out": 10,
1779      "cost_per_1m_in_cached": 0,
1780      "cost_per_1m_out_cached": 1.25,
1781      "context_window": 128000,
1782      "default_max_tokens": 8192,
1783      "can_reason": false,
1784      "supports_attachments": true
1785    },
1786    {
1787      "id": "openai/gpt-4o-audio-preview",
1788      "name": "OpenAI: GPT-4o Audio",
1789      "cost_per_1m_in": 2.5,
1790      "cost_per_1m_out": 10,
1791      "cost_per_1m_in_cached": 0,
1792      "cost_per_1m_out_cached": 0,
1793      "context_window": 128000,
1794      "default_max_tokens": 8192,
1795      "can_reason": false,
1796      "supports_attachments": false
1797    },
1798    {
1799      "id": "openai/gpt-4o-mini",
1800      "name": "OpenAI: GPT-4o-mini",
1801      "cost_per_1m_in": 0.15,
1802      "cost_per_1m_out": 0.6,
1803      "cost_per_1m_in_cached": 0,
1804      "cost_per_1m_out_cached": 0.075,
1805      "context_window": 128000,
1806      "default_max_tokens": 8192,
1807      "can_reason": false,
1808      "supports_attachments": true
1809    },
1810    {
1811      "id": "openai/gpt-4o-mini-2024-07-18",
1812      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1813      "cost_per_1m_in": 0.15,
1814      "cost_per_1m_out": 0.6,
1815      "cost_per_1m_in_cached": 0,
1816      "cost_per_1m_out_cached": 0.075,
1817      "context_window": 128000,
1818      "default_max_tokens": 8192,
1819      "can_reason": false,
1820      "supports_attachments": true
1821    },
1822    {
1823      "id": "openai/gpt-5",
1824      "name": "OpenAI: GPT-5",
1825      "cost_per_1m_in": 1.25,
1826      "cost_per_1m_out": 10,
1827      "cost_per_1m_in_cached": 0,
1828      "cost_per_1m_out_cached": 0.125,
1829      "context_window": 400000,
1830      "default_max_tokens": 64000,
1831      "can_reason": true,
1832      "reasoning_levels": [
1833        "low",
1834        "medium",
1835        "high"
1836      ],
1837      "default_reasoning_effort": "medium",
1838      "supports_attachments": true
1839    },
1840    {
1841      "id": "openai/gpt-5-codex",
1842      "name": "OpenAI: GPT-5 Codex",
1843      "cost_per_1m_in": 1.25,
1844      "cost_per_1m_out": 10,
1845      "cost_per_1m_in_cached": 0,
1846      "cost_per_1m_out_cached": 0.125,
1847      "context_window": 400000,
1848      "default_max_tokens": 64000,
1849      "can_reason": true,
1850      "reasoning_levels": [
1851        "low",
1852        "medium",
1853        "high"
1854      ],
1855      "default_reasoning_effort": "medium",
1856      "supports_attachments": true
1857    },
1858    {
1859      "id": "openai/gpt-5-image",
1860      "name": "OpenAI: GPT-5 Image",
1861      "cost_per_1m_in": 10,
1862      "cost_per_1m_out": 10,
1863      "cost_per_1m_in_cached": 0,
1864      "cost_per_1m_out_cached": 1.25,
1865      "context_window": 400000,
1866      "default_max_tokens": 64000,
1867      "can_reason": true,
1868      "reasoning_levels": [
1869        "low",
1870        "medium",
1871        "high"
1872      ],
1873      "default_reasoning_effort": "medium",
1874      "supports_attachments": true
1875    },
1876    {
1877      "id": "openai/gpt-5-image-mini",
1878      "name": "OpenAI: GPT-5 Image Mini",
1879      "cost_per_1m_in": 2.5,
1880      "cost_per_1m_out": 2,
1881      "cost_per_1m_in_cached": 0,
1882      "cost_per_1m_out_cached": 0.25,
1883      "context_window": 400000,
1884      "default_max_tokens": 64000,
1885      "can_reason": true,
1886      "reasoning_levels": [
1887        "low",
1888        "medium",
1889        "high"
1890      ],
1891      "default_reasoning_effort": "medium",
1892      "supports_attachments": true
1893    },
1894    {
1895      "id": "openai/gpt-5-mini",
1896      "name": "OpenAI: GPT-5 Mini",
1897      "cost_per_1m_in": 0.25,
1898      "cost_per_1m_out": 2,
1899      "cost_per_1m_in_cached": 0,
1900      "cost_per_1m_out_cached": 0.03,
1901      "context_window": 400000,
1902      "default_max_tokens": 40000,
1903      "can_reason": true,
1904      "reasoning_levels": [
1905        "low",
1906        "medium",
1907        "high"
1908      ],
1909      "default_reasoning_effort": "medium",
1910      "supports_attachments": true
1911    },
1912    {
1913      "id": "openai/gpt-5-nano",
1914      "name": "OpenAI: GPT-5 Nano",
1915      "cost_per_1m_in": 0.05,
1916      "cost_per_1m_out": 0.4,
1917      "cost_per_1m_in_cached": 0,
1918      "cost_per_1m_out_cached": 0.01,
1919      "context_window": 400000,
1920      "default_max_tokens": 40000,
1921      "can_reason": true,
1922      "reasoning_levels": [
1923        "low",
1924        "medium",
1925        "high"
1926      ],
1927      "default_reasoning_effort": "medium",
1928      "supports_attachments": true
1929    },
1930    {
1931      "id": "openai/gpt-5-pro",
1932      "name": "OpenAI: GPT-5 Pro",
1933      "cost_per_1m_in": 15,
1934      "cost_per_1m_out": 120,
1935      "cost_per_1m_in_cached": 0,
1936      "cost_per_1m_out_cached": 0,
1937      "context_window": 400000,
1938      "default_max_tokens": 64000,
1939      "can_reason": true,
1940      "reasoning_levels": [
1941        "low",
1942        "medium",
1943        "high"
1944      ],
1945      "default_reasoning_effort": "medium",
1946      "supports_attachments": true
1947    },
1948    {
1949      "id": "openai/gpt-5.1",
1950      "name": "OpenAI: GPT-5.1",
1951      "cost_per_1m_in": 1.25,
1952      "cost_per_1m_out": 10,
1953      "cost_per_1m_in_cached": 0,
1954      "cost_per_1m_out_cached": 0.13,
1955      "context_window": 400000,
1956      "default_max_tokens": 64000,
1957      "can_reason": true,
1958      "reasoning_levels": [
1959        "low",
1960        "medium",
1961        "high"
1962      ],
1963      "default_reasoning_effort": "medium",
1964      "supports_attachments": true
1965    },
1966    {
1967      "id": "openai/gpt-5.1-chat",
1968      "name": "OpenAI: GPT-5.1 Chat",
1969      "cost_per_1m_in": 1.25,
1970      "cost_per_1m_out": 10,
1971      "cost_per_1m_in_cached": 0,
1972      "cost_per_1m_out_cached": 0.125,
1973      "context_window": 128000,
1974      "default_max_tokens": 8192,
1975      "can_reason": false,
1976      "supports_attachments": true
1977    },
1978    {
1979      "id": "openai/gpt-5.1-codex",
1980      "name": "OpenAI: GPT-5.1-Codex",
1981      "cost_per_1m_in": 1.25,
1982      "cost_per_1m_out": 10,
1983      "cost_per_1m_in_cached": 0,
1984      "cost_per_1m_out_cached": 0.13,
1985      "context_window": 400000,
1986      "default_max_tokens": 64000,
1987      "can_reason": true,
1988      "reasoning_levels": [
1989        "low",
1990        "medium",
1991        "high"
1992      ],
1993      "default_reasoning_effort": "medium",
1994      "supports_attachments": true
1995    },
1996    {
1997      "id": "openai/gpt-5.1-codex-max",
1998      "name": "OpenAI: GPT-5.1-Codex-Max",
1999      "cost_per_1m_in": 1.25,
2000      "cost_per_1m_out": 10,
2001      "cost_per_1m_in_cached": 0,
2002      "cost_per_1m_out_cached": 0.125,
2003      "context_window": 400000,
2004      "default_max_tokens": 64000,
2005      "can_reason": true,
2006      "reasoning_levels": [
2007        "low",
2008        "medium",
2009        "high"
2010      ],
2011      "default_reasoning_effort": "medium",
2012      "supports_attachments": true
2013    },
2014    {
2015      "id": "openai/gpt-5.1-codex-mini",
2016      "name": "OpenAI: GPT-5.1-Codex-Mini",
2017      "cost_per_1m_in": 0.25,
2018      "cost_per_1m_out": 2,
2019      "cost_per_1m_in_cached": 0,
2020      "cost_per_1m_out_cached": 0.025,
2021      "context_window": 400000,
2022      "default_max_tokens": 50000,
2023      "can_reason": true,
2024      "reasoning_levels": [
2025        "low",
2026        "medium",
2027        "high"
2028      ],
2029      "default_reasoning_effort": "medium",
2030      "supports_attachments": true
2031    },
2032    {
2033      "id": "openai/gpt-5.2",
2034      "name": "OpenAI: GPT-5.2",
2035      "cost_per_1m_in": 1.75,
2036      "cost_per_1m_out": 14,
2037      "cost_per_1m_in_cached": 0,
2038      "cost_per_1m_out_cached": 0.175,
2039      "context_window": 400000,
2040      "default_max_tokens": 64000,
2041      "can_reason": true,
2042      "reasoning_levels": [
2043        "low",
2044        "medium",
2045        "high"
2046      ],
2047      "default_reasoning_effort": "medium",
2048      "supports_attachments": true
2049    },
2050    {
2051      "id": "openai/gpt-5.2-chat",
2052      "name": "OpenAI: GPT-5.2 Chat",
2053      "cost_per_1m_in": 1.75,
2054      "cost_per_1m_out": 14,
2055      "cost_per_1m_in_cached": 0,
2056      "cost_per_1m_out_cached": 0.175,
2057      "context_window": 128000,
2058      "default_max_tokens": 16000,
2059      "can_reason": false,
2060      "supports_attachments": true
2061    },
2062    {
2063      "id": "openai/gpt-5.2-pro",
2064      "name": "OpenAI: GPT-5.2 Pro",
2065      "cost_per_1m_in": 21,
2066      "cost_per_1m_out": 168,
2067      "cost_per_1m_in_cached": 0,
2068      "cost_per_1m_out_cached": 0,
2069      "context_window": 400000,
2070      "default_max_tokens": 64000,
2071      "can_reason": true,
2072      "reasoning_levels": [
2073        "low",
2074        "medium",
2075        "high"
2076      ],
2077      "default_reasoning_effort": "medium",
2078      "supports_attachments": true
2079    },
2080    {
2081      "id": "openai/gpt-5.2-codex",
2082      "name": "OpenAI: GPT-5.2-Codex",
2083      "cost_per_1m_in": 1.75,
2084      "cost_per_1m_out": 14,
2085      "cost_per_1m_in_cached": 0,
2086      "cost_per_1m_out_cached": 0.175,
2087      "context_window": 400000,
2088      "default_max_tokens": 64000,
2089      "can_reason": true,
2090      "reasoning_levels": [
2091        "low",
2092        "medium",
2093        "high"
2094      ],
2095      "default_reasoning_effort": "medium",
2096      "supports_attachments": true
2097    },
2098    {
2099      "id": "openai/gpt-5.3-chat",
2100      "name": "OpenAI: GPT-5.3 Chat",
2101      "cost_per_1m_in": 1.75,
2102      "cost_per_1m_out": 14,
2103      "cost_per_1m_in_cached": 0,
2104      "cost_per_1m_out_cached": 0.175,
2105      "context_window": 128000,
2106      "default_max_tokens": 8192,
2107      "can_reason": false,
2108      "supports_attachments": true
2109    },
2110    {
2111      "id": "openai/gpt-5.3-codex",
2112      "name": "OpenAI: GPT-5.3-Codex",
2113      "cost_per_1m_in": 1.75,
2114      "cost_per_1m_out": 14,
2115      "cost_per_1m_in_cached": 0,
2116      "cost_per_1m_out_cached": 0.175,
2117      "context_window": 400000,
2118      "default_max_tokens": 64000,
2119      "can_reason": true,
2120      "reasoning_levels": [
2121        "low",
2122        "medium",
2123        "high"
2124      ],
2125      "default_reasoning_effort": "medium",
2126      "supports_attachments": true
2127    },
2128    {
2129      "id": "openai/gpt-5.4",
2130      "name": "OpenAI: GPT-5.4",
2131      "cost_per_1m_in": 2.5,
2132      "cost_per_1m_out": 15,
2133      "cost_per_1m_in_cached": 0,
2134      "cost_per_1m_out_cached": 0.25,
2135      "context_window": 1050000,
2136      "default_max_tokens": 64000,
2137      "can_reason": true,
2138      "reasoning_levels": [
2139        "low",
2140        "medium",
2141        "high"
2142      ],
2143      "default_reasoning_effort": "medium",
2144      "supports_attachments": true
2145    },
2146    {
2147      "id": "openai/gpt-5.4-mini",
2148      "name": "OpenAI: GPT-5.4 Mini",
2149      "cost_per_1m_in": 0.75,
2150      "cost_per_1m_out": 4.5,
2151      "cost_per_1m_in_cached": 0,
2152      "cost_per_1m_out_cached": 0.075,
2153      "context_window": 400000,
2154      "default_max_tokens": 64000,
2155      "can_reason": true,
2156      "reasoning_levels": [
2157        "low",
2158        "medium",
2159        "high"
2160      ],
2161      "default_reasoning_effort": "medium",
2162      "supports_attachments": true
2163    },
2164    {
2165      "id": "openai/gpt-5.4-nano",
2166      "name": "OpenAI: GPT-5.4 Nano",
2167      "cost_per_1m_in": 0.2,
2168      "cost_per_1m_out": 1.25,
2169      "cost_per_1m_in_cached": 0,
2170      "cost_per_1m_out_cached": 0.02,
2171      "context_window": 400000,
2172      "default_max_tokens": 64000,
2173      "can_reason": true,
2174      "reasoning_levels": [
2175        "low",
2176        "medium",
2177        "high"
2178      ],
2179      "default_reasoning_effort": "medium",
2180      "supports_attachments": true
2181    },
2182    {
2183      "id": "openai/gpt-5.4-pro",
2184      "name": "OpenAI: GPT-5.4 Pro",
2185      "cost_per_1m_in": 30,
2186      "cost_per_1m_out": 180,
2187      "cost_per_1m_in_cached": 0,
2188      "cost_per_1m_out_cached": 0,
2189      "context_window": 1050000,
2190      "default_max_tokens": 64000,
2191      "can_reason": true,
2192      "reasoning_levels": [
2193        "low",
2194        "medium",
2195        "high"
2196      ],
2197      "default_reasoning_effort": "medium",
2198      "supports_attachments": true
2199    },
2200    {
2201      "id": "openai/gpt-oss-120b",
2202      "name": "OpenAI: gpt-oss-120b",
2203      "cost_per_1m_in": 0.15,
2204      "cost_per_1m_out": 0.6,
2205      "cost_per_1m_in_cached": 0,
2206      "cost_per_1m_out_cached": 0,
2207      "context_window": 131072,
2208      "default_max_tokens": 13107,
2209      "can_reason": true,
2210      "reasoning_levels": [
2211        "low",
2212        "medium",
2213        "high"
2214      ],
2215      "default_reasoning_effort": "medium",
2216      "supports_attachments": false
2217    },
2218    {
2219      "id": "openai/gpt-oss-120b:free",
2220      "name": "OpenAI: gpt-oss-120b (free)",
2221      "cost_per_1m_in": 0,
2222      "cost_per_1m_out": 0,
2223      "cost_per_1m_in_cached": 0,
2224      "cost_per_1m_out_cached": 0,
2225      "context_window": 131072,
2226      "default_max_tokens": 65536,
2227      "can_reason": true,
2228      "reasoning_levels": [
2229        "low",
2230        "medium",
2231        "high"
2232      ],
2233      "default_reasoning_effort": "medium",
2234      "supports_attachments": false
2235    },
2236    {
2237      "id": "openai/gpt-oss-20b",
2238      "name": "OpenAI: gpt-oss-20b",
2239      "cost_per_1m_in": 0.03,
2240      "cost_per_1m_out": 0.14,
2241      "cost_per_1m_in_cached": 0,
2242      "cost_per_1m_out_cached": 0,
2243      "context_window": 131072,
2244      "default_max_tokens": 13107,
2245      "can_reason": true,
2246      "reasoning_levels": [
2247        "low",
2248        "medium",
2249        "high"
2250      ],
2251      "default_reasoning_effort": "medium",
2252      "supports_attachments": false
2253    },
2254    {
2255      "id": "openai/gpt-oss-20b:free",
2256      "name": "OpenAI: gpt-oss-20b (free)",
2257      "cost_per_1m_in": 0,
2258      "cost_per_1m_out": 0,
2259      "cost_per_1m_in_cached": 0,
2260      "cost_per_1m_out_cached": 0,
2261      "context_window": 131072,
2262      "default_max_tokens": 4096,
2263      "can_reason": true,
2264      "reasoning_levels": [
2265        "low",
2266        "medium",
2267        "high"
2268      ],
2269      "default_reasoning_effort": "medium",
2270      "supports_attachments": false
2271    },
2272    {
2273      "id": "openai/gpt-oss-safeguard-20b",
2274      "name": "OpenAI: gpt-oss-safeguard-20b",
2275      "cost_per_1m_in": 0.075,
2276      "cost_per_1m_out": 0.3,
2277      "cost_per_1m_in_cached": 0,
2278      "cost_per_1m_out_cached": 0.037,
2279      "context_window": 131072,
2280      "default_max_tokens": 32768,
2281      "can_reason": true,
2282      "reasoning_levels": [
2283        "low",
2284        "medium",
2285        "high"
2286      ],
2287      "default_reasoning_effort": "medium",
2288      "supports_attachments": false
2289    },
2290    {
2291      "id": "openai/o1",
2292      "name": "OpenAI: o1",
2293      "cost_per_1m_in": 15,
2294      "cost_per_1m_out": 60,
2295      "cost_per_1m_in_cached": 0,
2296      "cost_per_1m_out_cached": 7.5,
2297      "context_window": 200000,
2298      "default_max_tokens": 50000,
2299      "can_reason": true,
2300      "reasoning_levels": [
2301        "low",
2302        "medium",
2303        "high"
2304      ],
2305      "default_reasoning_effort": "medium",
2306      "supports_attachments": true
2307    },
2308    {
2309      "id": "openai/o3",
2310      "name": "OpenAI: o3",
2311      "cost_per_1m_in": 2,
2312      "cost_per_1m_out": 8,
2313      "cost_per_1m_in_cached": 0,
2314      "cost_per_1m_out_cached": 0.5,
2315      "context_window": 200000,
2316      "default_max_tokens": 50000,
2317      "can_reason": true,
2318      "reasoning_levels": [
2319        "low",
2320        "medium",
2321        "high"
2322      ],
2323      "default_reasoning_effort": "medium",
2324      "supports_attachments": true
2325    },
2326    {
2327      "id": "openai/o3-deep-research",
2328      "name": "OpenAI: o3 Deep Research",
2329      "cost_per_1m_in": 10,
2330      "cost_per_1m_out": 40,
2331      "cost_per_1m_in_cached": 0,
2332      "cost_per_1m_out_cached": 2.5,
2333      "context_window": 200000,
2334      "default_max_tokens": 50000,
2335      "can_reason": true,
2336      "reasoning_levels": [
2337        "low",
2338        "medium",
2339        "high"
2340      ],
2341      "default_reasoning_effort": "medium",
2342      "supports_attachments": true
2343    },
2344    {
2345      "id": "openai/o3-mini",
2346      "name": "OpenAI: o3 Mini",
2347      "cost_per_1m_in": 1.1,
2348      "cost_per_1m_out": 4.4,
2349      "cost_per_1m_in_cached": 0,
2350      "cost_per_1m_out_cached": 0.55,
2351      "context_window": 200000,
2352      "default_max_tokens": 50000,
2353      "can_reason": true,
2354      "reasoning_levels": [
2355        "low",
2356        "medium",
2357        "high"
2358      ],
2359      "default_reasoning_effort": "medium",
2360      "supports_attachments": false
2361    },
2362    {
2363      "id": "openai/o3-mini-high",
2364      "name": "OpenAI: o3 Mini High",
2365      "cost_per_1m_in": 1.1,
2366      "cost_per_1m_out": 4.4,
2367      "cost_per_1m_in_cached": 0,
2368      "cost_per_1m_out_cached": 0.55,
2369      "context_window": 200000,
2370      "default_max_tokens": 50000,
2371      "can_reason": true,
2372      "reasoning_levels": [
2373        "low",
2374        "medium",
2375        "high"
2376      ],
2377      "default_reasoning_effort": "medium",
2378      "supports_attachments": false
2379    },
2380    {
2381      "id": "openai/o3-pro",
2382      "name": "OpenAI: o3 Pro",
2383      "cost_per_1m_in": 20,
2384      "cost_per_1m_out": 80,
2385      "cost_per_1m_in_cached": 0,
2386      "cost_per_1m_out_cached": 0,
2387      "context_window": 200000,
2388      "default_max_tokens": 50000,
2389      "can_reason": true,
2390      "reasoning_levels": [
2391        "low",
2392        "medium",
2393        "high"
2394      ],
2395      "default_reasoning_effort": "medium",
2396      "supports_attachments": true
2397    },
2398    {
2399      "id": "openai/o4-mini",
2400      "name": "OpenAI: o4 Mini",
2401      "cost_per_1m_in": 1.1,
2402      "cost_per_1m_out": 4.4,
2403      "cost_per_1m_in_cached": 0,
2404      "cost_per_1m_out_cached": 0.275,
2405      "context_window": 200000,
2406      "default_max_tokens": 50000,
2407      "can_reason": true,
2408      "reasoning_levels": [
2409        "low",
2410        "medium",
2411        "high"
2412      ],
2413      "default_reasoning_effort": "medium",
2414      "supports_attachments": true
2415    },
2416    {
2417      "id": "openai/o4-mini-deep-research",
2418      "name": "OpenAI: o4 Mini Deep Research",
2419      "cost_per_1m_in": 2,
2420      "cost_per_1m_out": 8,
2421      "cost_per_1m_in_cached": 0,
2422      "cost_per_1m_out_cached": 0.5,
2423      "context_window": 200000,
2424      "default_max_tokens": 50000,
2425      "can_reason": true,
2426      "reasoning_levels": [
2427        "low",
2428        "medium",
2429        "high"
2430      ],
2431      "default_reasoning_effort": "medium",
2432      "supports_attachments": true
2433    },
2434    {
2435      "id": "openai/o4-mini-high",
2436      "name": "OpenAI: o4 Mini High",
2437      "cost_per_1m_in": 1.1,
2438      "cost_per_1m_out": 4.4,
2439      "cost_per_1m_in_cached": 0,
2440      "cost_per_1m_out_cached": 0.275,
2441      "context_window": 200000,
2442      "default_max_tokens": 50000,
2443      "can_reason": true,
2444      "reasoning_levels": [
2445        "low",
2446        "medium",
2447        "high"
2448      ],
2449      "default_reasoning_effort": "medium",
2450      "supports_attachments": true
2451    },
2452    {
2453      "id": "prime-intellect/intellect-3",
2454      "name": "Prime Intellect: INTELLECT-3",
2455      "cost_per_1m_in": 0.2,
2456      "cost_per_1m_out": 1.1,
2457      "cost_per_1m_in_cached": 0,
2458      "cost_per_1m_out_cached": 0,
2459      "context_window": 131072,
2460      "default_max_tokens": 65536,
2461      "can_reason": true,
2462      "reasoning_levels": [
2463        "low",
2464        "medium",
2465        "high"
2466      ],
2467      "default_reasoning_effort": "medium",
2468      "supports_attachments": false
2469    },
2470    {
2471      "id": "qwen/qwen-2.5-72b-instruct",
2472      "name": "Qwen2.5 72B Instruct",
2473      "cost_per_1m_in": 0.12,
2474      "cost_per_1m_out": 0.39,
2475      "cost_per_1m_in_cached": 0,
2476      "cost_per_1m_out_cached": 0,
2477      "context_window": 32768,
2478      "default_max_tokens": 8192,
2479      "can_reason": false,
2480      "supports_attachments": false
2481    },
2482    {
2483      "id": "qwen/qwq-32b",
2484      "name": "Qwen: QwQ 32B",
2485      "cost_per_1m_in": 0.15,
2486      "cost_per_1m_out": 0.58,
2487      "cost_per_1m_in_cached": 0,
2488      "cost_per_1m_out_cached": 0,
2489      "context_window": 131072,
2490      "default_max_tokens": 65536,
2491      "can_reason": true,
2492      "reasoning_levels": [
2493        "low",
2494        "medium",
2495        "high"
2496      ],
2497      "default_reasoning_effort": "medium",
2498      "supports_attachments": false
2499    },
2500    {
2501      "id": "qwen/qwen-plus-2025-07-28",
2502      "name": "Qwen: Qwen Plus 0728",
2503      "cost_per_1m_in": 0.26,
2504      "cost_per_1m_out": 0.78,
2505      "cost_per_1m_in_cached": 0.325,
2506      "cost_per_1m_out_cached": 0,
2507      "context_window": 1000000,
2508      "default_max_tokens": 16384,
2509      "can_reason": false,
2510      "supports_attachments": false
2511    },
2512    {
2513      "id": "qwen/qwen-plus-2025-07-28:thinking",
2514      "name": "Qwen: Qwen Plus 0728 (thinking)",
2515      "cost_per_1m_in": 0.26,
2516      "cost_per_1m_out": 0.78,
2517      "cost_per_1m_in_cached": 0.325,
2518      "cost_per_1m_out_cached": 0,
2519      "context_window": 1000000,
2520      "default_max_tokens": 16384,
2521      "can_reason": true,
2522      "reasoning_levels": [
2523        "low",
2524        "medium",
2525        "high"
2526      ],
2527      "default_reasoning_effort": "medium",
2528      "supports_attachments": false
2529    },
2530    {
2531      "id": "qwen/qwen-vl-max",
2532      "name": "Qwen: Qwen VL Max",
2533      "cost_per_1m_in": 0.52,
2534      "cost_per_1m_out": 2.08,
2535      "cost_per_1m_in_cached": 0,
2536      "cost_per_1m_out_cached": 0,
2537      "context_window": 131072,
2538      "default_max_tokens": 16384,
2539      "can_reason": false,
2540      "supports_attachments": true
2541    },
2542    {
2543      "id": "qwen/qwen-max",
2544      "name": "Qwen: Qwen-Max ",
2545      "cost_per_1m_in": 1.04,
2546      "cost_per_1m_out": 4.16,
2547      "cost_per_1m_in_cached": 0,
2548      "cost_per_1m_out_cached": 0.208,
2549      "context_window": 32768,
2550      "default_max_tokens": 4096,
2551      "can_reason": false,
2552      "supports_attachments": false
2553    },
2554    {
2555      "id": "qwen/qwen-plus",
2556      "name": "Qwen: Qwen-Plus",
2557      "cost_per_1m_in": 0.26,
2558      "cost_per_1m_out": 0.78,
2559      "cost_per_1m_in_cached": 0.325,
2560      "cost_per_1m_out_cached": 0.052,
2561      "context_window": 1000000,
2562      "default_max_tokens": 16384,
2563      "can_reason": false,
2564      "supports_attachments": false
2565    },
2566    {
2567      "id": "qwen/qwen-turbo",
2568      "name": "Qwen: Qwen-Turbo",
2569      "cost_per_1m_in": 0.0325,
2570      "cost_per_1m_out": 0.13,
2571      "cost_per_1m_in_cached": 0,
2572      "cost_per_1m_out_cached": 0.0065,
2573      "context_window": 131072,
2574      "default_max_tokens": 4096,
2575      "can_reason": false,
2576      "supports_attachments": false
2577    },
2578    {
2579      "id": "qwen/qwen-2.5-7b-instruct",
2580      "name": "Qwen: Qwen2.5 7B Instruct",
2581      "cost_per_1m_in": 0.04,
2582      "cost_per_1m_out": 0.1,
2583      "cost_per_1m_in_cached": 0,
2584      "cost_per_1m_out_cached": 0.04,
2585      "context_window": 32768,
2586      "default_max_tokens": 4096,
2587      "can_reason": false,
2588      "supports_attachments": false
2589    },
2590    {
2591      "id": "qwen/qwen3-14b",
2592      "name": "Qwen: Qwen3 14B",
2593      "cost_per_1m_in": 0.2275,
2594      "cost_per_1m_out": 0.91,
2595      "cost_per_1m_in_cached": 0,
2596      "cost_per_1m_out_cached": 0,
2597      "context_window": 131072,
2598      "default_max_tokens": 4096,
2599      "can_reason": true,
2600      "reasoning_levels": [
2601        "low",
2602        "medium",
2603        "high"
2604      ],
2605      "default_reasoning_effort": "medium",
2606      "supports_attachments": false
2607    },
2608    {
2609      "id": "qwen/qwen3-235b-a22b",
2610      "name": "Qwen: Qwen3 235B A22B",
2611      "cost_per_1m_in": 0.455,
2612      "cost_per_1m_out": 1.82,
2613      "cost_per_1m_in_cached": 0,
2614      "cost_per_1m_out_cached": 0,
2615      "context_window": 131072,
2616      "default_max_tokens": 4096,
2617      "can_reason": true,
2618      "reasoning_levels": [
2619        "low",
2620        "medium",
2621        "high"
2622      ],
2623      "default_reasoning_effort": "medium",
2624      "supports_attachments": false
2625    },
2626    {
2627      "id": "qwen/qwen3-235b-a22b-2507",
2628      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2629      "cost_per_1m_in": 0.22,
2630      "cost_per_1m_out": 0.88,
2631      "cost_per_1m_in_cached": 0,
2632      "cost_per_1m_out_cached": 0,
2633      "context_window": 262144,
2634      "default_max_tokens": 8192,
2635      "can_reason": false,
2636      "supports_attachments": false
2637    },
2638    {
2639      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2640      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2641      "cost_per_1m_in": 0.13,
2642      "cost_per_1m_out": 0.6,
2643      "cost_per_1m_in_cached": 0,
2644      "cost_per_1m_out_cached": 0,
2645      "context_window": 262144,
2646      "default_max_tokens": 131072,
2647      "can_reason": true,
2648      "reasoning_levels": [
2649        "low",
2650        "medium",
2651        "high"
2652      ],
2653      "default_reasoning_effort": "medium",
2654      "supports_attachments": false
2655    },
2656    {
2657      "id": "qwen/qwen3-30b-a3b",
2658      "name": "Qwen: Qwen3 30B A3B",
2659      "cost_per_1m_in": 0.13,
2660      "cost_per_1m_out": 0.52,
2661      "cost_per_1m_in_cached": 0,
2662      "cost_per_1m_out_cached": 0,
2663      "context_window": 131072,
2664      "default_max_tokens": 4096,
2665      "can_reason": true,
2666      "reasoning_levels": [
2667        "low",
2668        "medium",
2669        "high"
2670      ],
2671      "default_reasoning_effort": "medium",
2672      "supports_attachments": false
2673    },
2674    {
2675      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2676      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2677      "cost_per_1m_in": 0.1,
2678      "cost_per_1m_out": 0.3,
2679      "cost_per_1m_in_cached": 0,
2680      "cost_per_1m_out_cached": 0.1,
2681      "context_window": 262144,
2682      "default_max_tokens": 131072,
2683      "can_reason": false,
2684      "supports_attachments": false
2685    },
2686    {
2687      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2688      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2689      "cost_per_1m_in": 0.08,
2690      "cost_per_1m_out": 0.4,
2691      "cost_per_1m_in_cached": 0,
2692      "cost_per_1m_out_cached": 0.08,
2693      "context_window": 131072,
2694      "default_max_tokens": 65536,
2695      "can_reason": true,
2696      "reasoning_levels": [
2697        "low",
2698        "medium",
2699        "high"
2700      ],
2701      "default_reasoning_effort": "medium",
2702      "supports_attachments": false
2703    },
2704    {
2705      "id": "qwen/qwen3-32b",
2706      "name": "Qwen: Qwen3 32B",
2707      "cost_per_1m_in": 0.104,
2708      "cost_per_1m_out": 0.416,
2709      "cost_per_1m_in_cached": 0,
2710      "cost_per_1m_out_cached": 0,
2711      "context_window": 131072,
2712      "default_max_tokens": 4096,
2713      "can_reason": true,
2714      "reasoning_levels": [
2715        "low",
2716        "medium",
2717        "high"
2718      ],
2719      "default_reasoning_effort": "medium",
2720      "supports_attachments": false
2721    },
2722    {
2723      "id": "qwen/qwen3-8b",
2724      "name": "Qwen: Qwen3 8B",
2725      "cost_per_1m_in": 0.117,
2726      "cost_per_1m_out": 0.455,
2727      "cost_per_1m_in_cached": 0,
2728      "cost_per_1m_out_cached": 0,
2729      "context_window": 131072,
2730      "default_max_tokens": 4096,
2731      "can_reason": true,
2732      "reasoning_levels": [
2733        "low",
2734        "medium",
2735        "high"
2736      ],
2737      "default_reasoning_effort": "medium",
2738      "supports_attachments": false
2739    },
2740    {
2741      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2742      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2743      "cost_per_1m_in": 0.2925,
2744      "cost_per_1m_out": 1.4625,
2745      "cost_per_1m_in_cached": 0,
2746      "cost_per_1m_out_cached": 0,
2747      "context_window": 262144,
2748      "default_max_tokens": 32768,
2749      "can_reason": false,
2750      "supports_attachments": false
2751    },
2752    {
2753      "id": "qwen/qwen3-coder",
2754      "name": "Qwen: Qwen3 Coder 480B A35B",
2755      "cost_per_1m_in": 0.22,
2756      "cost_per_1m_out": 1.8,
2757      "cost_per_1m_in_cached": 0,
2758      "cost_per_1m_out_cached": 0,
2759      "context_window": 262144,
2760      "default_max_tokens": 32768,
2761      "can_reason": false,
2762      "supports_attachments": false
2763    },
2764    {
2765      "id": "qwen/qwen3-coder:free",
2766      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2767      "cost_per_1m_in": 0,
2768      "cost_per_1m_out": 0,
2769      "cost_per_1m_in_cached": 0,
2770      "cost_per_1m_out_cached": 0,
2771      "context_window": 262000,
2772      "default_max_tokens": 131000,
2773      "can_reason": false,
2774      "supports_attachments": false
2775    },
2776    {
2777      "id": "qwen/qwen3-coder-flash",
2778      "name": "Qwen: Qwen3 Coder Flash",
2779      "cost_per_1m_in": 0.195,
2780      "cost_per_1m_out": 0.975,
2781      "cost_per_1m_in_cached": 0.24375,
2782      "cost_per_1m_out_cached": 0.039,
2783      "context_window": 1000000,
2784      "default_max_tokens": 32768,
2785      "can_reason": false,
2786      "supports_attachments": false
2787    },
2788    {
2789      "id": "qwen/qwen3-coder-next",
2790      "name": "Qwen: Qwen3 Coder Next",
2791      "cost_per_1m_in": 0.15,
2792      "cost_per_1m_out": 0.8,
2793      "cost_per_1m_in_cached": 0,
2794      "cost_per_1m_out_cached": 0.11,
2795      "context_window": 262144,
2796      "default_max_tokens": 131072,
2797      "can_reason": false,
2798      "supports_attachments": false
2799    },
2800    {
2801      "id": "qwen/qwen3-coder-plus",
2802      "name": "Qwen: Qwen3 Coder Plus",
2803      "cost_per_1m_in": 0.65,
2804      "cost_per_1m_out": 3.25,
2805      "cost_per_1m_in_cached": 0.8125,
2806      "cost_per_1m_out_cached": 0.13,
2807      "context_window": 1000000,
2808      "default_max_tokens": 32768,
2809      "can_reason": false,
2810      "supports_attachments": false
2811    },
2812    {
2813      "id": "qwen/qwen3-max",
2814      "name": "Qwen: Qwen3 Max",
2815      "cost_per_1m_in": 0.78,
2816      "cost_per_1m_out": 3.9,
2817      "cost_per_1m_in_cached": 0.975,
2818      "cost_per_1m_out_cached": 0.156,
2819      "context_window": 262144,
2820      "default_max_tokens": 16384,
2821      "can_reason": false,
2822      "supports_attachments": false
2823    },
2824    {
2825      "id": "qwen/qwen3-max-thinking",
2826      "name": "Qwen: Qwen3 Max Thinking",
2827      "cost_per_1m_in": 0.78,
2828      "cost_per_1m_out": 3.9,
2829      "cost_per_1m_in_cached": 0,
2830      "cost_per_1m_out_cached": 0,
2831      "context_window": 262144,
2832      "default_max_tokens": 16384,
2833      "can_reason": true,
2834      "reasoning_levels": [
2835        "low",
2836        "medium",
2837        "high"
2838      ],
2839      "default_reasoning_effort": "medium",
2840      "supports_attachments": false
2841    },
2842    {
2843      "id": "qwen/qwen3-next-80b-a3b-instruct",
2844      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2845      "cost_per_1m_in": 0.09,
2846      "cost_per_1m_out": 1.1,
2847      "cost_per_1m_in_cached": 0,
2848      "cost_per_1m_out_cached": 0,
2849      "context_window": 262144,
2850      "default_max_tokens": 26214,
2851      "can_reason": false,
2852      "supports_attachments": false
2853    },
2854    {
2855      "id": "qwen/qwen3-next-80b-a3b-instruct:free",
2856      "name": "Qwen: Qwen3 Next 80B A3B Instruct (free)",
2857      "cost_per_1m_in": 0,
2858      "cost_per_1m_out": 0,
2859      "cost_per_1m_in_cached": 0,
2860      "cost_per_1m_out_cached": 0,
2861      "context_window": 262144,
2862      "default_max_tokens": 26214,
2863      "can_reason": false,
2864      "supports_attachments": false
2865    },
2866    {
2867      "id": "qwen/qwen3-next-80b-a3b-thinking",
2868      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2869      "cost_per_1m_in": 0.15,
2870      "cost_per_1m_out": 1.5,
2871      "cost_per_1m_in_cached": 0,
2872      "cost_per_1m_out_cached": 0.15,
2873      "context_window": 262144,
2874      "default_max_tokens": 16384,
2875      "can_reason": true,
2876      "reasoning_levels": [
2877        "low",
2878        "medium",
2879        "high"
2880      ],
2881      "default_reasoning_effort": "medium",
2882      "supports_attachments": false
2883    },
2884    {
2885      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2886      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2887      "cost_per_1m_in": 0.26,
2888      "cost_per_1m_out": 1.04,
2889      "cost_per_1m_in_cached": 0,
2890      "cost_per_1m_out_cached": 0,
2891      "context_window": 131072,
2892      "default_max_tokens": 16384,
2893      "can_reason": false,
2894      "supports_attachments": true
2895    },
2896    {
2897      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2898      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2899      "cost_per_1m_in": 0.26,
2900      "cost_per_1m_out": 2.6,
2901      "cost_per_1m_in_cached": 0,
2902      "cost_per_1m_out_cached": 0,
2903      "context_window": 131072,
2904      "default_max_tokens": 16384,
2905      "can_reason": true,
2906      "reasoning_levels": [
2907        "low",
2908        "medium",
2909        "high"
2910      ],
2911      "default_reasoning_effort": "medium",
2912      "supports_attachments": true
2913    },
2914    {
2915      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2916      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2917      "cost_per_1m_in": 0.29,
2918      "cost_per_1m_out": 1,
2919      "cost_per_1m_in_cached": 0,
2920      "cost_per_1m_out_cached": 0,
2921      "context_window": 262144,
2922      "default_max_tokens": 131072,
2923      "can_reason": false,
2924      "supports_attachments": true
2925    },
2926    {
2927      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2928      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2929      "cost_per_1m_in": 0.13,
2930      "cost_per_1m_out": 1.56,
2931      "cost_per_1m_in_cached": 0,
2932      "cost_per_1m_out_cached": 0,
2933      "context_window": 131072,
2934      "default_max_tokens": 16384,
2935      "can_reason": true,
2936      "reasoning_levels": [
2937        "low",
2938        "medium",
2939        "high"
2940      ],
2941      "default_reasoning_effort": "medium",
2942      "supports_attachments": true
2943    },
2944    {
2945      "id": "qwen/qwen3-vl-32b-instruct",
2946      "name": "Qwen: Qwen3 VL 32B Instruct",
2947      "cost_per_1m_in": 0.104,
2948      "cost_per_1m_out": 0.416,
2949      "cost_per_1m_in_cached": 0,
2950      "cost_per_1m_out_cached": 0,
2951      "context_window": 131072,
2952      "default_max_tokens": 16384,
2953      "can_reason": false,
2954      "supports_attachments": true
2955    },
2956    {
2957      "id": "qwen/qwen3-vl-8b-instruct",
2958      "name": "Qwen: Qwen3 VL 8B Instruct",
2959      "cost_per_1m_in": 0.25,
2960      "cost_per_1m_out": 0.75,
2961      "cost_per_1m_in_cached": 0,
2962      "cost_per_1m_out_cached": 0.12,
2963      "context_window": 262144,
2964      "default_max_tokens": 131072,
2965      "can_reason": false,
2966      "supports_attachments": true
2967    },
2968    {
2969      "id": "qwen/qwen3-vl-8b-thinking",
2970      "name": "Qwen: Qwen3 VL 8B Thinking",
2971      "cost_per_1m_in": 0.117,
2972      "cost_per_1m_out": 1.365,
2973      "cost_per_1m_in_cached": 0,
2974      "cost_per_1m_out_cached": 0,
2975      "context_window": 131072,
2976      "default_max_tokens": 16384,
2977      "can_reason": true,
2978      "reasoning_levels": [
2979        "low",
2980        "medium",
2981        "high"
2982      ],
2983      "default_reasoning_effort": "medium",
2984      "supports_attachments": true
2985    },
2986    {
2987      "id": "qwen/qwen3.5-397b-a17b",
2988      "name": "Qwen: Qwen3.5 397B A17B",
2989      "cost_per_1m_in": 0.6,
2990      "cost_per_1m_out": 3.6,
2991      "cost_per_1m_in_cached": 0,
2992      "cost_per_1m_out_cached": 0.3,
2993      "context_window": 262144,
2994      "default_max_tokens": 131072,
2995      "can_reason": true,
2996      "reasoning_levels": [
2997        "low",
2998        "medium",
2999        "high"
3000      ],
3001      "default_reasoning_effort": "medium",
3002      "supports_attachments": true
3003    },
3004    {
3005      "id": "qwen/qwen3.5-plus-02-15",
3006      "name": "Qwen: Qwen3.5 Plus 2026-02-15",
3007      "cost_per_1m_in": 0.26,
3008      "cost_per_1m_out": 1.56,
3009      "cost_per_1m_in_cached": 0.325,
3010      "cost_per_1m_out_cached": 0,
3011      "context_window": 1000000,
3012      "default_max_tokens": 32768,
3013      "can_reason": true,
3014      "reasoning_levels": [
3015        "low",
3016        "medium",
3017        "high"
3018      ],
3019      "default_reasoning_effort": "medium",
3020      "supports_attachments": true
3021    },
3022    {
3023      "id": "qwen/qwen3.5-122b-a10b",
3024      "name": "Qwen: Qwen3.5-122B-A10B",
3025      "cost_per_1m_in": 0.3,
3026      "cost_per_1m_out": 2.4,
3027      "cost_per_1m_in_cached": 0,
3028      "cost_per_1m_out_cached": 0.3,
3029      "context_window": 262144,
3030      "default_max_tokens": 32768,
3031      "can_reason": true,
3032      "reasoning_levels": [
3033        "low",
3034        "medium",
3035        "high"
3036      ],
3037      "default_reasoning_effort": "medium",
3038      "supports_attachments": true
3039    },
3040    {
3041      "id": "qwen/qwen3.5-27b",
3042      "name": "Qwen: Qwen3.5-27B",
3043      "cost_per_1m_in": 0.27,
3044      "cost_per_1m_out": 2.16,
3045      "cost_per_1m_in_cached": 0,
3046      "cost_per_1m_out_cached": 0.27,
3047      "context_window": 262144,
3048      "default_max_tokens": 32768,
3049      "can_reason": true,
3050      "reasoning_levels": [
3051        "low",
3052        "medium",
3053        "high"
3054      ],
3055      "default_reasoning_effort": "medium",
3056      "supports_attachments": true
3057    },
3058    {
3059      "id": "qwen/qwen3.5-35b-a3b",
3060      "name": "Qwen: Qwen3.5-35B-A3B",
3061      "cost_per_1m_in": 0.225,
3062      "cost_per_1m_out": 1.8,
3063      "cost_per_1m_in_cached": 0,
3064      "cost_per_1m_out_cached": 0.225,
3065      "context_window": 262144,
3066      "default_max_tokens": 32768,
3067      "can_reason": true,
3068      "reasoning_levels": [
3069        "low",
3070        "medium",
3071        "high"
3072      ],
3073      "default_reasoning_effort": "medium",
3074      "supports_attachments": true
3075    },
3076    {
3077      "id": "qwen/qwen3.5-9b",
3078      "name": "Qwen: Qwen3.5-9B",
3079      "cost_per_1m_in": 0.1,
3080      "cost_per_1m_out": 0.15,
3081      "cost_per_1m_in_cached": 0,
3082      "cost_per_1m_out_cached": 0,
3083      "context_window": 262144,
3084      "default_max_tokens": 26214,
3085      "can_reason": true,
3086      "reasoning_levels": [
3087        "low",
3088        "medium",
3089        "high"
3090      ],
3091      "default_reasoning_effort": "medium",
3092      "supports_attachments": true
3093    },
3094    {
3095      "id": "qwen/qwen3.5-flash-02-23",
3096      "name": "Qwen: Qwen3.5-Flash",
3097      "cost_per_1m_in": 0.065,
3098      "cost_per_1m_out": 0.26,
3099      "cost_per_1m_in_cached": 0.08125,
3100      "cost_per_1m_out_cached": 0,
3101      "context_window": 1000000,
3102      "default_max_tokens": 32768,
3103      "can_reason": true,
3104      "reasoning_levels": [
3105        "low",
3106        "medium",
3107        "high"
3108      ],
3109      "default_reasoning_effort": "medium",
3110      "supports_attachments": true
3111    },
3112    {
3113      "id": "qwen/qwen3.6-plus",
3114      "name": "Qwen: Qwen3.6 Plus",
3115      "cost_per_1m_in": 0.325,
3116      "cost_per_1m_out": 1.95,
3117      "cost_per_1m_in_cached": 0.40625,
3118      "cost_per_1m_out_cached": 0,
3119      "context_window": 1000000,
3120      "default_max_tokens": 32768,
3121      "can_reason": true,
3122      "reasoning_levels": [
3123        "low",
3124        "medium",
3125        "high"
3126      ],
3127      "default_reasoning_effort": "medium",
3128      "supports_attachments": true
3129    },
3130    {
3131      "id": "relace/relace-search",
3132      "name": "Relace: Relace Search",
3133      "cost_per_1m_in": 1,
3134      "cost_per_1m_out": 3,
3135      "cost_per_1m_in_cached": 0,
3136      "cost_per_1m_out_cached": 0,
3137      "context_window": 256000,
3138      "default_max_tokens": 64000,
3139      "can_reason": false,
3140      "supports_attachments": false
3141    },
3142    {
3143      "id": "stepfun/step-3.5-flash",
3144      "name": "StepFun: Step 3.5 Flash",
3145      "cost_per_1m_in": 0.1,
3146      "cost_per_1m_out": 0.3,
3147      "cost_per_1m_in_cached": 0,
3148      "cost_per_1m_out_cached": 0,
3149      "context_window": 262144,
3150      "default_max_tokens": 32768,
3151      "can_reason": true,
3152      "reasoning_levels": [
3153        "low",
3154        "medium",
3155        "high"
3156      ],
3157      "default_reasoning_effort": "medium",
3158      "supports_attachments": false
3159    },
3160    {
3161      "id": "tngtech/deepseek-r1t2-chimera",
3162      "name": "TNG: DeepSeek R1T2 Chimera",
3163      "cost_per_1m_in": 0.3,
3164      "cost_per_1m_out": 1.1,
3165      "cost_per_1m_in_cached": 0,
3166      "cost_per_1m_out_cached": 0.15,
3167      "context_window": 163840,
3168      "default_max_tokens": 81920,
3169      "can_reason": true,
3170      "reasoning_levels": [
3171        "low",
3172        "medium",
3173        "high"
3174      ],
3175      "default_reasoning_effort": "medium",
3176      "supports_attachments": false
3177    },
3178    {
3179      "id": "thedrummer/rocinante-12b",
3180      "name": "TheDrummer: Rocinante 12B",
3181      "cost_per_1m_in": 0.17,
3182      "cost_per_1m_out": 0.43,
3183      "cost_per_1m_in_cached": 0,
3184      "cost_per_1m_out_cached": 0,
3185      "context_window": 32768,
3186      "default_max_tokens": 16384,
3187      "can_reason": false,
3188      "supports_attachments": false
3189    },
3190    {
3191      "id": "thedrummer/unslopnemo-12b",
3192      "name": "TheDrummer: UnslopNemo 12B",
3193      "cost_per_1m_in": 0.4,
3194      "cost_per_1m_out": 0.4,
3195      "cost_per_1m_in_cached": 0,
3196      "cost_per_1m_out_cached": 0,
3197      "context_window": 32768,
3198      "default_max_tokens": 16384,
3199      "can_reason": false,
3200      "supports_attachments": false
3201    },
3202    {
3203      "id": "alibaba/tongyi-deepresearch-30b-a3b",
3204      "name": "Tongyi DeepResearch 30B A3B",
3205      "cost_per_1m_in": 0.09,
3206      "cost_per_1m_out": 0.45,
3207      "cost_per_1m_in_cached": 0,
3208      "cost_per_1m_out_cached": 0.09,
3209      "context_window": 131072,
3210      "default_max_tokens": 65536,
3211      "can_reason": true,
3212      "reasoning_levels": [
3213        "low",
3214        "medium",
3215        "high"
3216      ],
3217      "default_reasoning_effort": "medium",
3218      "supports_attachments": false
3219    },
3220    {
3221      "id": "upstage/solar-pro-3",
3222      "name": "Upstage: Solar Pro 3",
3223      "cost_per_1m_in": 0.15,
3224      "cost_per_1m_out": 0.6,
3225      "cost_per_1m_in_cached": 0,
3226      "cost_per_1m_out_cached": 0.015,
3227      "context_window": 128000,
3228      "default_max_tokens": 12800,
3229      "can_reason": true,
3230      "reasoning_levels": [
3231        "low",
3232        "medium",
3233        "high"
3234      ],
3235      "default_reasoning_effort": "medium",
3236      "supports_attachments": false
3237    },
3238    {
3239      "id": "xiaomi/mimo-v2-flash",
3240      "name": "Xiaomi: MiMo-V2-Flash",
3241      "cost_per_1m_in": 0.1,
3242      "cost_per_1m_out": 0.3,
3243      "cost_per_1m_in_cached": 0,
3244      "cost_per_1m_out_cached": 0.01,
3245      "context_window": 262144,
3246      "default_max_tokens": 32768,
3247      "can_reason": true,
3248      "reasoning_levels": [
3249        "low",
3250        "medium",
3251        "high"
3252      ],
3253      "default_reasoning_effort": "medium",
3254      "supports_attachments": false
3255    },
3256    {
3257      "id": "xiaomi/mimo-v2-omni",
3258      "name": "Xiaomi: MiMo-V2-Omni",
3259      "cost_per_1m_in": 0.4,
3260      "cost_per_1m_out": 2,
3261      "cost_per_1m_in_cached": 0,
3262      "cost_per_1m_out_cached": 0.08,
3263      "context_window": 262144,
3264      "default_max_tokens": 32768,
3265      "can_reason": true,
3266      "reasoning_levels": [
3267        "low",
3268        "medium",
3269        "high"
3270      ],
3271      "default_reasoning_effort": "medium",
3272      "supports_attachments": true
3273    },
3274    {
3275      "id": "xiaomi/mimo-v2-pro",
3276      "name": "Xiaomi: MiMo-V2-Pro",
3277      "cost_per_1m_in": 1,
3278      "cost_per_1m_out": 3,
3279      "cost_per_1m_in_cached": 0,
3280      "cost_per_1m_out_cached": 0.2,
3281      "context_window": 1048576,
3282      "default_max_tokens": 65536,
3283      "can_reason": true,
3284      "reasoning_levels": [
3285        "low",
3286        "medium",
3287        "high"
3288      ],
3289      "default_reasoning_effort": "medium",
3290      "supports_attachments": false
3291    },
3292    {
3293      "id": "z-ai/glm-4-32b",
3294      "name": "Z.ai: GLM 4 32B ",
3295      "cost_per_1m_in": 0.1,
3296      "cost_per_1m_out": 0.1,
3297      "cost_per_1m_in_cached": 0,
3298      "cost_per_1m_out_cached": 0,
3299      "context_window": 128000,
3300      "default_max_tokens": 12800,
3301      "can_reason": false,
3302      "supports_attachments": false
3303    },
3304    {
3305      "id": "z-ai/glm-4.5",
3306      "name": "Z.ai: GLM 4.5",
3307      "cost_per_1m_in": 0.6,
3308      "cost_per_1m_out": 2.2,
3309      "cost_per_1m_in_cached": 0,
3310      "cost_per_1m_out_cached": 0.11,
3311      "context_window": 131072,
3312      "default_max_tokens": 49152,
3313      "can_reason": true,
3314      "reasoning_levels": [
3315        "low",
3316        "medium",
3317        "high"
3318      ],
3319      "default_reasoning_effort": "medium",
3320      "supports_attachments": false
3321    },
3322    {
3323      "id": "z-ai/glm-4.5-air",
3324      "name": "Z.ai: GLM 4.5 Air",
3325      "cost_per_1m_in": 0.13,
3326      "cost_per_1m_out": 0.85,
3327      "cost_per_1m_in_cached": 0,
3328      "cost_per_1m_out_cached": 0.025,
3329      "context_window": 131072,
3330      "default_max_tokens": 49152,
3331      "can_reason": true,
3332      "reasoning_levels": [
3333        "low",
3334        "medium",
3335        "high"
3336      ],
3337      "default_reasoning_effort": "medium",
3338      "supports_attachments": false
3339    },
3340    {
3341      "id": "z-ai/glm-4.5-air:free",
3342      "name": "Z.ai: GLM 4.5 Air (free)",
3343      "cost_per_1m_in": 0,
3344      "cost_per_1m_out": 0,
3345      "cost_per_1m_in_cached": 0,
3346      "cost_per_1m_out_cached": 0,
3347      "context_window": 131072,
3348      "default_max_tokens": 48000,
3349      "can_reason": true,
3350      "reasoning_levels": [
3351        "low",
3352        "medium",
3353        "high"
3354      ],
3355      "default_reasoning_effort": "medium",
3356      "supports_attachments": false
3357    },
3358    {
3359      "id": "z-ai/glm-4.5v",
3360      "name": "Z.ai: GLM 4.5V",
3361      "cost_per_1m_in": 0.6,
3362      "cost_per_1m_out": 1.8,
3363      "cost_per_1m_in_cached": 0,
3364      "cost_per_1m_out_cached": 0.11,
3365      "context_window": 65536,
3366      "default_max_tokens": 8192,
3367      "can_reason": true,
3368      "reasoning_levels": [
3369        "low",
3370        "medium",
3371        "high"
3372      ],
3373      "default_reasoning_effort": "medium",
3374      "supports_attachments": true
3375    },
3376    {
3377      "id": "z-ai/glm-4.6",
3378      "name": "Z.ai: GLM 4.6",
3379      "cost_per_1m_in": 0.39,
3380      "cost_per_1m_out": 1.9,
3381      "cost_per_1m_in_cached": 0,
3382      "cost_per_1m_out_cached": 0,
3383      "context_window": 204800,
3384      "default_max_tokens": 102400,
3385      "can_reason": true,
3386      "reasoning_levels": [
3387        "low",
3388        "medium",
3389        "high"
3390      ],
3391      "default_reasoning_effort": "medium",
3392      "supports_attachments": false
3393    },
3394    {
3395      "id": "z-ai/glm-4.6v",
3396      "name": "Z.ai: GLM 4.6V",
3397      "cost_per_1m_in": 0.3,
3398      "cost_per_1m_out": 0.9,
3399      "cost_per_1m_in_cached": 0,
3400      "cost_per_1m_out_cached": 0,
3401      "context_window": 131072,
3402      "default_max_tokens": 65536,
3403      "can_reason": true,
3404      "reasoning_levels": [
3405        "low",
3406        "medium",
3407        "high"
3408      ],
3409      "default_reasoning_effort": "medium",
3410      "supports_attachments": true
3411    },
3412    {
3413      "id": "z-ai/glm-4.7",
3414      "name": "Z.ai: GLM 4.7",
3415      "cost_per_1m_in": 0.45,
3416      "cost_per_1m_out": 2.2,
3417      "cost_per_1m_in_cached": 0,
3418      "cost_per_1m_out_cached": 0.11,
3419      "context_window": 204800,
3420      "default_max_tokens": 102400,
3421      "can_reason": true,
3422      "reasoning_levels": [
3423        "low",
3424        "medium",
3425        "high"
3426      ],
3427      "default_reasoning_effort": "medium",
3428      "supports_attachments": false
3429    },
3430    {
3431      "id": "z-ai/glm-4.7-flash",
3432      "name": "Z.ai: GLM 4.7 Flash",
3433      "cost_per_1m_in": 0.06,
3434      "cost_per_1m_out": 0.4,
3435      "cost_per_1m_in_cached": 0,
3436      "cost_per_1m_out_cached": 0.01,
3437      "context_window": 202752,
3438      "default_max_tokens": 20275,
3439      "can_reason": true,
3440      "reasoning_levels": [
3441        "low",
3442        "medium",
3443        "high"
3444      ],
3445      "default_reasoning_effort": "medium",
3446      "supports_attachments": false
3447    },
3448    {
3449      "id": "z-ai/glm-5",
3450      "name": "Z.ai: GLM 5",
3451      "cost_per_1m_in": 0.95,
3452      "cost_per_1m_out": 2.55,
3453      "cost_per_1m_in_cached": 0,
3454      "cost_per_1m_out_cached": 0.2,
3455      "context_window": 204800,
3456      "default_max_tokens": 65536,
3457      "can_reason": true,
3458      "reasoning_levels": [
3459        "low",
3460        "medium",
3461        "high"
3462      ],
3463      "default_reasoning_effort": "medium",
3464      "supports_attachments": false
3465    },
3466    {
3467      "id": "z-ai/glm-5-turbo",
3468      "name": "Z.ai: GLM 5 Turbo",
3469      "cost_per_1m_in": 1.2,
3470      "cost_per_1m_out": 4,
3471      "cost_per_1m_in_cached": 0,
3472      "cost_per_1m_out_cached": 0.24,
3473      "context_window": 262144,
3474      "default_max_tokens": 65536,
3475      "can_reason": true,
3476      "reasoning_levels": [
3477        "low",
3478        "medium",
3479        "high"
3480      ],
3481      "default_reasoning_effort": "medium",
3482      "supports_attachments": false
3483    },
3484    {
3485      "id": "z-ai/glm-5.1",
3486      "name": "Z.ai: GLM 5.1",
3487      "cost_per_1m_in": 1.4,
3488      "cost_per_1m_out": 4.4,
3489      "cost_per_1m_in_cached": 0,
3490      "cost_per_1m_out_cached": 0.26,
3491      "context_window": 204800,
3492      "default_max_tokens": 65536,
3493      "can_reason": true,
3494      "reasoning_levels": [
3495        "low",
3496        "medium",
3497        "high"
3498      ],
3499      "default_reasoning_effort": "medium",
3500      "supports_attachments": false
3501    },
3502    {
3503      "id": "z-ai/glm-5v-turbo",
3504      "name": "Z.ai: GLM 5V Turbo",
3505      "cost_per_1m_in": 1.2,
3506      "cost_per_1m_out": 4,
3507      "cost_per_1m_in_cached": 0,
3508      "cost_per_1m_out_cached": 0.24,
3509      "context_window": 202752,
3510      "default_max_tokens": 65536,
3511      "can_reason": true,
3512      "reasoning_levels": [
3513        "low",
3514        "medium",
3515        "high"
3516      ],
3517      "default_reasoning_effort": "medium",
3518      "supports_attachments": true
3519    },
3520    {
3521      "id": "x-ai/grok-3",
3522      "name": "xAI: Grok 3",
3523      "cost_per_1m_in": 3,
3524      "cost_per_1m_out": 15,
3525      "cost_per_1m_in_cached": 0,
3526      "cost_per_1m_out_cached": 0.75,
3527      "context_window": 131072,
3528      "default_max_tokens": 13107,
3529      "can_reason": false,
3530      "supports_attachments": false
3531    },
3532    {
3533      "id": "x-ai/grok-3-beta",
3534      "name": "xAI: Grok 3 Beta",
3535      "cost_per_1m_in": 3,
3536      "cost_per_1m_out": 15,
3537      "cost_per_1m_in_cached": 0,
3538      "cost_per_1m_out_cached": 0.75,
3539      "context_window": 131072,
3540      "default_max_tokens": 13107,
3541      "can_reason": false,
3542      "supports_attachments": false
3543    },
3544    {
3545      "id": "x-ai/grok-3-mini",
3546      "name": "xAI: Grok 3 Mini",
3547      "cost_per_1m_in": 0.3,
3548      "cost_per_1m_out": 0.5,
3549      "cost_per_1m_in_cached": 0,
3550      "cost_per_1m_out_cached": 0.075,
3551      "context_window": 131072,
3552      "default_max_tokens": 13107,
3553      "can_reason": true,
3554      "reasoning_levels": [
3555        "low",
3556        "medium",
3557        "high"
3558      ],
3559      "default_reasoning_effort": "medium",
3560      "supports_attachments": false
3561    },
3562    {
3563      "id": "x-ai/grok-3-mini-beta",
3564      "name": "xAI: Grok 3 Mini Beta",
3565      "cost_per_1m_in": 0.3,
3566      "cost_per_1m_out": 0.5,
3567      "cost_per_1m_in_cached": 0,
3568      "cost_per_1m_out_cached": 0.075,
3569      "context_window": 131072,
3570      "default_max_tokens": 13107,
3571      "can_reason": true,
3572      "reasoning_levels": [
3573        "low",
3574        "medium",
3575        "high"
3576      ],
3577      "default_reasoning_effort": "medium",
3578      "supports_attachments": false
3579    },
3580    {
3581      "id": "x-ai/grok-4",
3582      "name": "xAI: Grok 4",
3583      "cost_per_1m_in": 3,
3584      "cost_per_1m_out": 15,
3585      "cost_per_1m_in_cached": 0,
3586      "cost_per_1m_out_cached": 0.75,
3587      "context_window": 256000,
3588      "default_max_tokens": 25600,
3589      "can_reason": true,
3590      "reasoning_levels": [
3591        "low",
3592        "medium",
3593        "high"
3594      ],
3595      "default_reasoning_effort": "medium",
3596      "supports_attachments": true
3597    },
3598    {
3599      "id": "x-ai/grok-4-fast",
3600      "name": "xAI: Grok 4 Fast",
3601      "cost_per_1m_in": 0.2,
3602      "cost_per_1m_out": 0.5,
3603      "cost_per_1m_in_cached": 0,
3604      "cost_per_1m_out_cached": 0.05,
3605      "context_window": 2000000,
3606      "default_max_tokens": 15000,
3607      "can_reason": true,
3608      "reasoning_levels": [
3609        "low",
3610        "medium",
3611        "high"
3612      ],
3613      "default_reasoning_effort": "medium",
3614      "supports_attachments": true
3615    },
3616    {
3617      "id": "x-ai/grok-4.1-fast",
3618      "name": "xAI: Grok 4.1 Fast",
3619      "cost_per_1m_in": 0.2,
3620      "cost_per_1m_out": 0.5,
3621      "cost_per_1m_in_cached": 0,
3622      "cost_per_1m_out_cached": 0.05,
3623      "context_window": 2000000,
3624      "default_max_tokens": 15000,
3625      "can_reason": true,
3626      "reasoning_levels": [
3627        "low",
3628        "medium",
3629        "high"
3630      ],
3631      "default_reasoning_effort": "medium",
3632      "supports_attachments": true
3633    },
3634    {
3635      "id": "x-ai/grok-4.20",
3636      "name": "xAI: Grok 4.20",
3637      "cost_per_1m_in": 2,
3638      "cost_per_1m_out": 6,
3639      "cost_per_1m_in_cached": 0,
3640      "cost_per_1m_out_cached": 0.2,
3641      "context_window": 2000000,
3642      "default_max_tokens": 200000,
3643      "can_reason": true,
3644      "reasoning_levels": [
3645        "low",
3646        "medium",
3647        "high"
3648      ],
3649      "default_reasoning_effort": "medium",
3650      "supports_attachments": true
3651    },
3652    {
3653      "id": "x-ai/grok-code-fast-1",
3654      "name": "xAI: Grok Code Fast 1",
3655      "cost_per_1m_in": 0.2,
3656      "cost_per_1m_out": 1.5,
3657      "cost_per_1m_in_cached": 0,
3658      "cost_per_1m_out_cached": 0.02,
3659      "context_window": 256000,
3660      "default_max_tokens": 5000,
3661      "can_reason": true,
3662      "reasoning_levels": [
3663        "low",
3664        "medium",
3665        "high"
3666      ],
3667      "default_reasoning_effort": "medium",
3668      "supports_attachments": false
3669    }
3670  ],
3671  "default_headers": {
3672    "HTTP-Referer": "https://charm.land",
3673    "X-Title": "Crush"
3674  }
3675}