   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false
  21    },
  22    {
  23      "id": "allenai/olmo-3.1-32b-instruct",
  24      "name": "AllenAI: Olmo 3.1 32B Instruct",
  25      "cost_per_1m_in": 0.2,
  26      "cost_per_1m_out": 0.6,
  27      "cost_per_1m_in_cached": 0,
  28      "cost_per_1m_out_cached": 0,
  29      "context_window": 65536,
  30      "default_max_tokens": 8192,
  31      "can_reason": false,
  32      "supports_attachments": false
  33    },
  34    {
  35      "id": "amazon/nova-2-lite-v1",
  36      "name": "Amazon: Nova 2 Lite",
  37      "cost_per_1m_in": 0.3,
  38      "cost_per_1m_out": 2.5,
  39      "cost_per_1m_in_cached": 0,
  40      "cost_per_1m_out_cached": 0,
  41      "context_window": 1000000,
  42      "default_max_tokens": 32767,
  43      "can_reason": true,
  44      "reasoning_levels": [
  45        "low",
  46        "medium",
  47        "high"
  48      ],
  49      "default_reasoning_effort": "medium",
  50      "supports_attachments": true
  51    },
  52    {
  53      "id": "amazon/nova-lite-v1",
  54      "name": "Amazon: Nova Lite 1.0",
  55      "cost_per_1m_in": 0.06,
  56      "cost_per_1m_out": 0.24,
  57      "cost_per_1m_in_cached": 0,
  58      "cost_per_1m_out_cached": 0,
  59      "context_window": 300000,
  60      "default_max_tokens": 2560,
  61      "can_reason": false,
  62      "supports_attachments": true
  63    },
  64    {
  65      "id": "amazon/nova-micro-v1",
  66      "name": "Amazon: Nova Micro 1.0",
  67      "cost_per_1m_in": 0.035,
  68      "cost_per_1m_out": 0.14,
  69      "cost_per_1m_in_cached": 0,
  70      "cost_per_1m_out_cached": 0,
  71      "context_window": 128000,
  72      "default_max_tokens": 2560,
  73      "can_reason": false,
  74      "supports_attachments": false
  75    },
  76    {
  77      "id": "amazon/nova-premier-v1",
  78      "name": "Amazon: Nova Premier 1.0",
  79      "cost_per_1m_in": 2.5,
  80      "cost_per_1m_out": 12.5,
  81      "cost_per_1m_in_cached": 0,
  82      "cost_per_1m_out_cached": 0.625,
  83      "context_window": 1000000,
  84      "default_max_tokens": 16000,
  85      "can_reason": false,
  86      "supports_attachments": true
  87    },
  88    {
  89      "id": "amazon/nova-pro-v1",
  90      "name": "Amazon: Nova Pro 1.0",
  91      "cost_per_1m_in": 0.8,
  92      "cost_per_1m_out": 3.2,
  93      "cost_per_1m_in_cached": 0,
  94      "cost_per_1m_out_cached": 0,
  95      "context_window": 300000,
  96      "default_max_tokens": 2560,
  97      "can_reason": false,
  98      "supports_attachments": true
  99    },
 100    {
 101      "id": "anthropic/claude-3-haiku",
 102      "name": "Anthropic: Claude 3 Haiku",
 103      "cost_per_1m_in": 0.25,
 104      "cost_per_1m_out": 1.25,
 105      "cost_per_1m_in_cached": 0.3,
 106      "cost_per_1m_out_cached": 0.03,
 107      "context_window": 200000,
 108      "default_max_tokens": 2048,
 109      "can_reason": false,
 110      "supports_attachments": true
 111    },
 112    {
 113      "id": "anthropic/claude-3.5-haiku",
 114      "name": "Anthropic: Claude 3.5 Haiku",
 115      "cost_per_1m_in": 0.8,
 116      "cost_per_1m_out": 4,
 117      "cost_per_1m_in_cached": 1,
 118      "cost_per_1m_out_cached": 0.08,
 119      "context_window": 200000,
 120      "default_max_tokens": 4096,
 121      "can_reason": false,
 122      "supports_attachments": true
 123    },
 124    {
 125      "id": "anthropic/claude-3.7-sonnet",
 126      "name": "Anthropic: Claude 3.7 Sonnet",
 127      "cost_per_1m_in": 3,
 128      "cost_per_1m_out": 15,
 129      "cost_per_1m_in_cached": 3.75,
 130      "cost_per_1m_out_cached": 0.3,
 131      "context_window": 200000,
 132      "default_max_tokens": 32000,
 133      "can_reason": true,
 134      "reasoning_levels": [
 135        "low",
 136        "medium",
 137        "high"
 138      ],
 139      "default_reasoning_effort": "medium",
 140      "supports_attachments": true
 141    },
 142    {
 143      "id": "anthropic/claude-3.7-sonnet:thinking",
 144      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 145      "cost_per_1m_in": 3,
 146      "cost_per_1m_out": 15,
 147      "cost_per_1m_in_cached": 3.75,
 148      "cost_per_1m_out_cached": 0.3,
 149      "context_window": 200000,
 150      "default_max_tokens": 32000,
 151      "can_reason": true,
 152      "reasoning_levels": [
 153        "low",
 154        "medium",
 155        "high"
 156      ],
 157      "default_reasoning_effort": "medium",
 158      "supports_attachments": true
 159    },
 160    {
 161      "id": "anthropic/claude-haiku-4.5",
 162      "name": "Anthropic: Claude Haiku 4.5",
 163      "cost_per_1m_in": 1,
 164      "cost_per_1m_out": 5,
 165      "cost_per_1m_in_cached": 1.25,
 166      "cost_per_1m_out_cached": 0.1,
 167      "context_window": 200000,
 168      "default_max_tokens": 32000,
 169      "can_reason": true,
 170      "reasoning_levels": [
 171        "low",
 172        "medium",
 173        "high"
 174      ],
 175      "default_reasoning_effort": "medium",
 176      "supports_attachments": true
 177    },
 178    {
 179      "id": "anthropic/claude-opus-4",
 180      "name": "Anthropic: Claude Opus 4",
 181      "cost_per_1m_in": 15,
 182      "cost_per_1m_out": 75,
 183      "cost_per_1m_in_cached": 18.75,
 184      "cost_per_1m_out_cached": 1.5,
 185      "context_window": 200000,
 186      "default_max_tokens": 16000,
 187      "can_reason": true,
 188      "reasoning_levels": [
 189        "low",
 190        "medium",
 191        "high"
 192      ],
 193      "default_reasoning_effort": "medium",
 194      "supports_attachments": true
 195    },
 196    {
 197      "id": "anthropic/claude-opus-4.1",
 198      "name": "Anthropic: Claude Opus 4.1",
 199      "cost_per_1m_in": 15,
 200      "cost_per_1m_out": 75,
 201      "cost_per_1m_in_cached": 18.75,
 202      "cost_per_1m_out_cached": 1.5,
 203      "context_window": 200000,
 204      "default_max_tokens": 16000,
 205      "can_reason": true,
 206      "reasoning_levels": [
 207        "low",
 208        "medium",
 209        "high"
 210      ],
 211      "default_reasoning_effort": "medium",
 212      "supports_attachments": true
 213    },
 214    {
 215      "id": "anthropic/claude-opus-4.5",
 216      "name": "Anthropic: Claude Opus 4.5",
 217      "cost_per_1m_in": 5,
 218      "cost_per_1m_out": 25,
 219      "cost_per_1m_in_cached": 6.25,
 220      "cost_per_1m_out_cached": 0.5,
 221      "context_window": 200000,
 222      "default_max_tokens": 32000,
 223      "can_reason": true,
 224      "reasoning_levels": [
 225        "low",
 226        "medium",
 227        "high"
 228      ],
 229      "default_reasoning_effort": "medium",
 230      "supports_attachments": true
 231    },
 232    {
 233      "id": "anthropic/claude-opus-4.6",
 234      "name": "Anthropic: Claude Opus 4.6",
 235      "cost_per_1m_in": 5,
 236      "cost_per_1m_out": 25,
 237      "cost_per_1m_in_cached": 6.25,
 238      "cost_per_1m_out_cached": 0.5,
 239      "context_window": 1000000,
 240      "default_max_tokens": 64000,
 241      "can_reason": true,
 242      "reasoning_levels": [
 243        "low",
 244        "medium",
 245        "high"
 246      ],
 247      "default_reasoning_effort": "medium",
 248      "supports_attachments": true
 249    },
 250    {
 251      "id": "anthropic/claude-opus-4.6-fast",
 252      "name": "Anthropic: Claude Opus 4.6 (Fast)",
 253      "cost_per_1m_in": 30,
 254      "cost_per_1m_out": 150,
 255      "cost_per_1m_in_cached": 37.5,
 256      "cost_per_1m_out_cached": 3,
 257      "context_window": 1000000,
 258      "default_max_tokens": 64000,
 259      "can_reason": true,
 260      "reasoning_levels": [
 261        "low",
 262        "medium",
 263        "high"
 264      ],
 265      "default_reasoning_effort": "medium",
 266      "supports_attachments": true
 267    },
 268    {
 269      "id": "anthropic/claude-opus-4.7",
 270      "name": "Anthropic: Claude Opus 4.7",
 271      "cost_per_1m_in": 5,
 272      "cost_per_1m_out": 25,
 273      "cost_per_1m_in_cached": 6.25,
 274      "cost_per_1m_out_cached": 0.5,
 275      "context_window": 1000000,
 276      "default_max_tokens": 64000,
 277      "can_reason": true,
 278      "reasoning_levels": [
 279        "low",
 280        "medium",
 281        "high"
 282      ],
 283      "default_reasoning_effort": "medium",
 284      "supports_attachments": true
 285    },
 286    {
 287      "id": "anthropic/claude-sonnet-4",
 288      "name": "Anthropic: Claude Sonnet 4",
 289      "cost_per_1m_in": 3,
 290      "cost_per_1m_out": 15,
 291      "cost_per_1m_in_cached": 3.75,
 292      "cost_per_1m_out_cached": 0.3,
 293      "context_window": 1000000,
 294      "default_max_tokens": 32000,
 295      "can_reason": true,
 296      "reasoning_levels": [
 297        "low",
 298        "medium",
 299        "high"
 300      ],
 301      "default_reasoning_effort": "medium",
 302      "supports_attachments": true
 303    },
 304    {
 305      "id": "anthropic/claude-sonnet-4.5",
 306      "name": "Anthropic: Claude Sonnet 4.5",
 307      "cost_per_1m_in": 3,
 308      "cost_per_1m_out": 15,
 309      "cost_per_1m_in_cached": 3.75,
 310      "cost_per_1m_out_cached": 0.3,
 311      "context_window": 1000000,
 312      "default_max_tokens": 32000,
 313      "can_reason": true,
 314      "reasoning_levels": [
 315        "low",
 316        "medium",
 317        "high"
 318      ],
 319      "default_reasoning_effort": "medium",
 320      "supports_attachments": true
 321    },
 322    {
 323      "id": "anthropic/claude-sonnet-4.6",
 324      "name": "Anthropic: Claude Sonnet 4.6",
 325      "cost_per_1m_in": 3,
 326      "cost_per_1m_out": 15,
 327      "cost_per_1m_in_cached": 3.75,
 328      "cost_per_1m_out_cached": 0.3,
 329      "context_window": 1000000,
 330      "default_max_tokens": 64000,
 331      "can_reason": true,
 332      "reasoning_levels": [
 333        "low",
 334        "medium",
 335        "high"
 336      ],
 337      "default_reasoning_effort": "medium",
 338      "supports_attachments": true
 339    },
 340    {
 341      "id": "arcee-ai/trinity-large-preview",
 342      "name": "Arcee AI: Trinity Large Preview",
 343      "cost_per_1m_in": 0.15,
 344      "cost_per_1m_out": 0.45,
 345      "cost_per_1m_in_cached": 0,
 346      "cost_per_1m_out_cached": 0,
 347      "context_window": 131000,
 348      "default_max_tokens": 13100,
 349      "can_reason": false,
 350      "supports_attachments": false
 351    },
 352    {
 353      "id": "arcee-ai/trinity-large-thinking",
 354      "name": "Arcee AI: Trinity Large Thinking",
 355      "cost_per_1m_in": 0.22,
 356      "cost_per_1m_out": 0.85,
 357      "cost_per_1m_in_cached": 0,
 358      "cost_per_1m_out_cached": 0.06,
 359      "context_window": 262144,
 360      "default_max_tokens": 131072,
 361      "can_reason": true,
 362      "reasoning_levels": [
 363        "low",
 364        "medium",
 365        "high"
 366      ],
 367      "default_reasoning_effort": "medium",
 368      "supports_attachments": false
 369    },
 370    {
 371      "id": "arcee-ai/trinity-mini",
 372      "name": "Arcee AI: Trinity Mini",
 373      "cost_per_1m_in": 0.045,
 374      "cost_per_1m_out": 0.15,
 375      "cost_per_1m_in_cached": 0,
 376      "cost_per_1m_out_cached": 0,
 377      "context_window": 131072,
 378      "default_max_tokens": 65536,
 379      "can_reason": true,
 380      "reasoning_levels": [
 381        "low",
 382        "medium",
 383        "high"
 384      ],
 385      "default_reasoning_effort": "medium",
 386      "supports_attachments": false
 387    },
 388    {
 389      "id": "arcee-ai/virtuoso-large",
 390      "name": "Arcee AI: Virtuoso Large",
 391      "cost_per_1m_in": 0.75,
 392      "cost_per_1m_out": 1.2,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 131072,
 396      "default_max_tokens": 32000,
 397      "can_reason": false,
 398      "supports_attachments": false
 399    },
 400    {
 401      "id": "baidu/ernie-4.5-21b-a3b",
 402      "name": "Baidu: ERNIE 4.5 21B A3B",
 403      "cost_per_1m_in": 0.07,
 404      "cost_per_1m_out": 0.28,
 405      "cost_per_1m_in_cached": 0,
 406      "cost_per_1m_out_cached": 0,
 407      "context_window": 120000,
 408      "default_max_tokens": 4000,
 409      "can_reason": false,
 410      "supports_attachments": false
 411    },
 412    {
 413      "id": "baidu/ernie-4.5-vl-28b-a3b",
 414      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 415      "cost_per_1m_in": 0.14,
 416      "cost_per_1m_out": 0.56,
 417      "cost_per_1m_in_cached": 0,
 418      "cost_per_1m_out_cached": 0,
 419      "context_window": 30000,
 420      "default_max_tokens": 4000,
 421      "can_reason": true,
 422      "reasoning_levels": [
 423        "low",
 424        "medium",
 425        "high"
 426      ],
 427      "default_reasoning_effort": "medium",
 428      "supports_attachments": true
 429    },
 430    {
 431      "id": "bytedance-seed/seed-1.6",
 432      "name": "ByteDance Seed: Seed 1.6",
 433      "cost_per_1m_in": 0.25,
 434      "cost_per_1m_out": 2,
 435      "cost_per_1m_in_cached": 0,
 436      "cost_per_1m_out_cached": 0,
 437      "context_window": 262144,
 438      "default_max_tokens": 16384,
 439      "can_reason": true,
 440      "reasoning_levels": [
 441        "low",
 442        "medium",
 443        "high"
 444      ],
 445      "default_reasoning_effort": "medium",
 446      "supports_attachments": true
 447    },
 448    {
 449      "id": "bytedance-seed/seed-1.6-flash",
 450      "name": "ByteDance Seed: Seed 1.6 Flash",
 451      "cost_per_1m_in": 0.075,
 452      "cost_per_1m_out": 0.3,
 453      "cost_per_1m_in_cached": 0,
 454      "cost_per_1m_out_cached": 0,
 455      "context_window": 262144,
 456      "default_max_tokens": 16384,
 457      "can_reason": true,
 458      "reasoning_levels": [
 459        "low",
 460        "medium",
 461        "high"
 462      ],
 463      "default_reasoning_effort": "medium",
 464      "supports_attachments": true
 465    },
 466    {
 467      "id": "bytedance-seed/seed-2.0-lite",
 468      "name": "ByteDance Seed: Seed-2.0-Lite",
 469      "cost_per_1m_in": 0.25,
 470      "cost_per_1m_out": 2,
 471      "cost_per_1m_in_cached": 0,
 472      "cost_per_1m_out_cached": 0,
 473      "context_window": 262144,
 474      "default_max_tokens": 65536,
 475      "can_reason": true,
 476      "reasoning_levels": [
 477        "low",
 478        "medium",
 479        "high"
 480      ],
 481      "default_reasoning_effort": "medium",
 482      "supports_attachments": true
 483    },
 484    {
 485      "id": "bytedance-seed/seed-2.0-mini",
 486      "name": "ByteDance Seed: Seed-2.0-Mini",
 487      "cost_per_1m_in": 0.1,
 488      "cost_per_1m_out": 0.4,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 262144,
 492      "default_max_tokens": 65536,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": true
 501    },
 502    {
 503      "id": "cohere/command-r-08-2024",
 504      "name": "Cohere: Command R (08-2024)",
 505      "cost_per_1m_in": 0.15,
 506      "cost_per_1m_out": 0.6,
 507      "cost_per_1m_in_cached": 0,
 508      "cost_per_1m_out_cached": 0,
 509      "context_window": 128000,
 510      "default_max_tokens": 2000,
 511      "can_reason": false,
 512      "supports_attachments": false
 513    },
 514    {
 515      "id": "cohere/command-r-plus-08-2024",
 516      "name": "Cohere: Command R+ (08-2024)",
 517      "cost_per_1m_in": 2.5,
 518      "cost_per_1m_out": 10,
 519      "cost_per_1m_in_cached": 0,
 520      "cost_per_1m_out_cached": 0,
 521      "context_window": 128000,
 522      "default_max_tokens": 2000,
 523      "can_reason": false,
 524      "supports_attachments": false
 525    },
 526    {
 527      "id": "deepseek/deepseek-chat",
 528      "name": "DeepSeek: DeepSeek V3",
 529      "cost_per_1m_in": 0.4,
 530      "cost_per_1m_out": 1.3,
 531      "cost_per_1m_in_cached": 0,
 532      "cost_per_1m_out_cached": 0,
 533      "context_window": 64000,
 534      "default_max_tokens": 8000,
 535      "can_reason": false,
 536      "supports_attachments": false
 537    },
 538    {
 539      "id": "deepseek/deepseek-chat-v3-0324",
 540      "name": "DeepSeek: DeepSeek V3 0324",
 541      "cost_per_1m_in": 0.27,
 542      "cost_per_1m_out": 1.12,
 543      "cost_per_1m_in_cached": 0,
 544      "cost_per_1m_out_cached": 0.135,
 545      "context_window": 163840,
 546      "default_max_tokens": 81920,
 547      "can_reason": false,
 548      "supports_attachments": false
 549    },
 550    {
 551      "id": "deepseek/deepseek-chat-v3.1",
 552      "name": "DeepSeek: DeepSeek V3.1",
 553      "cost_per_1m_in": 0.6,
 554      "cost_per_1m_out": 1.7,
 555      "cost_per_1m_in_cached": 0,
 556      "cost_per_1m_out_cached": 0,
 557      "context_window": 163840,
 558      "default_max_tokens": 16384,
 559      "can_reason": true,
 560      "reasoning_levels": [
 561        "low",
 562        "medium",
 563        "high"
 564      ],
 565      "default_reasoning_effort": "medium",
 566      "supports_attachments": false
 567    },
 568    {
 569      "id": "deepseek/deepseek-v3.1-terminus",
 570      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 571      "cost_per_1m_in": 0.27,
 572      "cost_per_1m_out": 0.95,
 573      "cost_per_1m_in_cached": 0,
 574      "cost_per_1m_out_cached": 0.13,
 575      "context_window": 163840,
 576      "default_max_tokens": 16384,
 577      "can_reason": true,
 578      "reasoning_levels": [
 579        "low",
 580        "medium",
 581        "high"
 582      ],
 583      "default_reasoning_effort": "medium",
 584      "supports_attachments": false
 585    },
 586    {
 587      "id": "deepseek/deepseek-v3.2",
 588      "name": "DeepSeek: DeepSeek V3.2",
 589      "cost_per_1m_in": 0.5,
 590      "cost_per_1m_out": 1.5,
 591      "cost_per_1m_in_cached": 0,
 592      "cost_per_1m_out_cached": 0.25,
 593      "context_window": 163840,
 594      "default_max_tokens": 81920,
 595      "can_reason": true,
 596      "reasoning_levels": [
 597        "low",
 598        "medium",
 599        "high"
 600      ],
 601      "default_reasoning_effort": "medium",
 602      "supports_attachments": false
 603    },
 604    {
 605      "id": "deepseek/deepseek-v3.2-exp",
 606      "name": "DeepSeek: DeepSeek V3.2 Exp",
 607      "cost_per_1m_in": 0.27,
 608      "cost_per_1m_out": 0.41,
 609      "cost_per_1m_in_cached": 0,
 610      "cost_per_1m_out_cached": 0.27,
 611      "context_window": 163840,
 612      "default_max_tokens": 81920,
 613      "can_reason": true,
 614      "reasoning_levels": [
 615        "low",
 616        "medium",
 617        "high"
 618      ],
 619      "default_reasoning_effort": "medium",
 620      "supports_attachments": false
 621    },
 622    {
 623      "id": "deepseek/deepseek-v4-flash",
 624      "name": "DeepSeek: DeepSeek V4 Flash",
 625      "cost_per_1m_in": 0.14,
 626      "cost_per_1m_out": 0.28,
 627      "cost_per_1m_in_cached": 0,
 628      "cost_per_1m_out_cached": 0.0028,
 629      "context_window": 1048576,
 630      "default_max_tokens": 192000,
 631      "can_reason": true,
 632      "reasoning_levels": [
 633        "low",
 634        "medium",
 635        "high"
 636      ],
 637      "default_reasoning_effort": "medium",
 638      "supports_attachments": false
 639    },
 640    {
 641      "id": "deepseek/deepseek-v4-pro",
 642      "name": "DeepSeek: DeepSeek V4 Pro",
 643      "cost_per_1m_in": 0.435,
 644      "cost_per_1m_out": 0.87,
 645      "cost_per_1m_in_cached": 0,
 646      "cost_per_1m_out_cached": 0.00363,
 647      "context_window": 1048576,
 648      "default_max_tokens": 192000,
 649      "can_reason": true,
 650      "reasoning_levels": [
 651        "low",
 652        "medium",
 653        "high"
 654      ],
 655      "default_reasoning_effort": "medium",
 656      "supports_attachments": false
 657    },
 658    {
 659      "id": "deepseek/deepseek-r1",
 660      "name": "DeepSeek: R1",
 661      "cost_per_1m_in": 0.7,
 662      "cost_per_1m_out": 2.5,
 663      "cost_per_1m_in_cached": 0,
 664      "cost_per_1m_out_cached": 0,
 665      "context_window": 64000,
 666      "default_max_tokens": 8000,
 667      "can_reason": true,
 668      "reasoning_levels": [
 669        "low",
 670        "medium",
 671        "high"
 672      ],
 673      "default_reasoning_effort": "medium",
 674      "supports_attachments": false
 675    },
 676    {
 677      "id": "deepseek/deepseek-r1-0528",
 678      "name": "DeepSeek: R1 0528",
 679      "cost_per_1m_in": 0.5,
 680      "cost_per_1m_out": 2.18,
 681      "cost_per_1m_in_cached": 0,
 682      "cost_per_1m_out_cached": 0,
 683      "context_window": 163840,
 684      "default_max_tokens": 81920,
 685      "can_reason": true,
 686      "reasoning_levels": [
 687        "low",
 688        "medium",
 689        "high"
 690      ],
 691      "default_reasoning_effort": "medium",
 692      "supports_attachments": false
 693    },
 694    {
 695      "id": "essentialai/rnj-1-instruct",
 696      "name": "EssentialAI: Rnj 1 Instruct",
 697      "cost_per_1m_in": 0.15,
 698      "cost_per_1m_out": 0.15,
 699      "cost_per_1m_in_cached": 0,
 700      "cost_per_1m_out_cached": 0,
 701      "context_window": 32768,
 702      "default_max_tokens": 3276,
 703      "can_reason": false,
 704      "supports_attachments": false
 705    },
 706    {
 707      "id": "google/gemini-2.0-flash-001",
 708      "name": "Google: Gemini 2.0 Flash",
 709      "cost_per_1m_in": 0.1,
 710      "cost_per_1m_out": 0.4,
 711      "cost_per_1m_in_cached": 0.08333,
 712      "cost_per_1m_out_cached": 0.025,
 713      "context_window": 1048576,
 714      "default_max_tokens": 4096,
 715      "can_reason": false,
 716      "supports_attachments": true
 717    },
 718    {
 719      "id": "google/gemini-2.0-flash-lite-001",
 720      "name": "Google: Gemini 2.0 Flash Lite",
 721      "cost_per_1m_in": 0.075,
 722      "cost_per_1m_out": 0.3,
 723      "cost_per_1m_in_cached": 0,
 724      "cost_per_1m_out_cached": 0,
 725      "context_window": 1048576,
 726      "default_max_tokens": 4096,
 727      "can_reason": false,
 728      "supports_attachments": true
 729    },
 730    {
 731      "id": "google/gemini-2.5-flash",
 732      "name": "Google: Gemini 2.5 Flash",
 733      "cost_per_1m_in": 0.3,
 734      "cost_per_1m_out": 2.5,
 735      "cost_per_1m_in_cached": 0.08333,
 736      "cost_per_1m_out_cached": 0.03,
 737      "context_window": 1048576,
 738      "default_max_tokens": 32767,
 739      "can_reason": true,
 740      "reasoning_levels": [
 741        "low",
 742        "medium",
 743        "high"
 744      ],
 745      "default_reasoning_effort": "medium",
 746      "supports_attachments": true
 747    },
 748    {
 749      "id": "google/gemini-2.5-flash-lite",
 750      "name": "Google: Gemini 2.5 Flash Lite",
 751      "cost_per_1m_in": 0.1,
 752      "cost_per_1m_out": 0.4,
 753      "cost_per_1m_in_cached": 0.08333,
 754      "cost_per_1m_out_cached": 0.01,
 755      "context_window": 1048576,
 756      "default_max_tokens": 32767,
 757      "can_reason": true,
 758      "reasoning_levels": [
 759        "low",
 760        "medium",
 761        "high"
 762      ],
 763      "default_reasoning_effort": "medium",
 764      "supports_attachments": true
 765    },
 766    {
 767      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 768      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 769      "cost_per_1m_in": 0.1,
 770      "cost_per_1m_out": 0.4,
 771      "cost_per_1m_in_cached": 0.08333,
 772      "cost_per_1m_out_cached": 0.01,
 773      "context_window": 1048576,
 774      "default_max_tokens": 32767,
 775      "can_reason": true,
 776      "reasoning_levels": [
 777        "low",
 778        "medium",
 779        "high"
 780      ],
 781      "default_reasoning_effort": "medium",
 782      "supports_attachments": true
 783    },
 784    {
 785      "id": "google/gemini-2.5-pro",
 786      "name": "Google: Gemini 2.5 Pro",
 787      "cost_per_1m_in": 1.25,
 788      "cost_per_1m_out": 10,
 789      "cost_per_1m_in_cached": 0.375,
 790      "cost_per_1m_out_cached": 0.125,
 791      "context_window": 1048576,
 792      "default_max_tokens": 32768,
 793      "can_reason": true,
 794      "reasoning_levels": [
 795        "low",
 796        "medium",
 797        "high"
 798      ],
 799      "default_reasoning_effort": "medium",
 800      "supports_attachments": true
 801    },
 802    {
 803      "id": "google/gemini-2.5-pro-preview-05-06",
 804      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 805      "cost_per_1m_in": 1.25,
 806      "cost_per_1m_out": 10,
 807      "cost_per_1m_in_cached": 0.375,
 808      "cost_per_1m_out_cached": 0.125,
 809      "context_window": 1048576,
 810      "default_max_tokens": 32768,
 811      "can_reason": true,
 812      "reasoning_levels": [
 813        "low",
 814        "medium",
 815        "high"
 816      ],
 817      "default_reasoning_effort": "medium",
 818      "supports_attachments": true
 819    },
 820    {
 821      "id": "google/gemini-2.5-pro-preview",
 822      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 823      "cost_per_1m_in": 1.25,
 824      "cost_per_1m_out": 10,
 825      "cost_per_1m_in_cached": 0.375,
 826      "cost_per_1m_out_cached": 0.125,
 827      "context_window": 1048576,
 828      "default_max_tokens": 32768,
 829      "can_reason": true,
 830      "reasoning_levels": [
 831        "low",
 832        "medium",
 833        "high"
 834      ],
 835      "default_reasoning_effort": "medium",
 836      "supports_attachments": true
 837    },
 838    {
 839      "id": "google/gemini-3-flash-preview",
 840      "name": "Google: Gemini 3 Flash Preview",
 841      "cost_per_1m_in": 0.5,
 842      "cost_per_1m_out": 3,
 843      "cost_per_1m_in_cached": 0.08333,
 844      "cost_per_1m_out_cached": 0.05,
 845      "context_window": 1048576,
 846      "default_max_tokens": 32768,
 847      "can_reason": true,
 848      "reasoning_levels": [
 849        "low",
 850        "medium",
 851        "high"
 852      ],
 853      "default_reasoning_effort": "medium",
 854      "supports_attachments": true
 855    },
 856    {
 857      "id": "google/gemini-3.1-flash-lite-preview",
 858      "name": "Google: Gemini 3.1 Flash Lite Preview",
 859      "cost_per_1m_in": 0.25,
 860      "cost_per_1m_out": 1.5,
 861      "cost_per_1m_in_cached": 0.08333,
 862      "cost_per_1m_out_cached": 0.025,
 863      "context_window": 1048576,
 864      "default_max_tokens": 32768,
 865      "can_reason": true,
 866      "reasoning_levels": [
 867        "low",
 868        "medium",
 869        "high"
 870      ],
 871      "default_reasoning_effort": "medium",
 872      "supports_attachments": true
 873    },
 874    {
 875      "id": "google/gemini-3.1-pro-preview",
 876      "name": "Google: Gemini 3.1 Pro Preview",
 877      "cost_per_1m_in": 2,
 878      "cost_per_1m_out": 12,
 879      "cost_per_1m_in_cached": 0.375,
 880      "cost_per_1m_out_cached": 0.2,
 881      "context_window": 1048576,
 882      "default_max_tokens": 32768,
 883      "can_reason": true,
 884      "reasoning_levels": [
 885        "low",
 886        "medium",
 887        "high"
 888      ],
 889      "default_reasoning_effort": "medium",
 890      "supports_attachments": true
 891    },
 892    {
 893      "id": "google/gemini-3.1-pro-preview-customtools",
 894      "name": "Google: Gemini 3.1 Pro Preview Custom Tools",
 895      "cost_per_1m_in": 2,
 896      "cost_per_1m_out": 12,
 897      "cost_per_1m_in_cached": 0.375,
 898      "cost_per_1m_out_cached": 0.2,
 899      "context_window": 1048576,
 900      "default_max_tokens": 32768,
 901      "can_reason": true,
 902      "reasoning_levels": [
 903        "low",
 904        "medium",
 905        "high"
 906      ],
 907      "default_reasoning_effort": "medium",
 908      "supports_attachments": true
 909    },
 910    {
 911      "id": "google/gemma-3-12b-it",
 912      "name": "Google: Gemma 3 12B",
 913      "cost_per_1m_in": 0.04,
 914      "cost_per_1m_out": 0.13,
 915      "cost_per_1m_in_cached": 0,
 916      "cost_per_1m_out_cached": 0,
 917      "context_window": 131072,
 918      "default_max_tokens": 8192,
 919      "can_reason": false,
 920      "supports_attachments": true
 921    },
 922    {
 923      "id": "google/gemma-4-26b-a4b-it",
  924      "name": "Google: Gemma 4 26B A4B",
 925      "cost_per_1m_in": 0.15,
 926      "cost_per_1m_out": 0.6,
 927      "cost_per_1m_in_cached": 0,
 928      "cost_per_1m_out_cached": 0,
 929      "context_window": 262144,
 930      "default_max_tokens": 131072,
 931      "can_reason": true,
 932      "reasoning_levels": [
 933        "low",
 934        "medium",
 935        "high"
 936      ],
 937      "default_reasoning_effort": "medium",
 938      "supports_attachments": true
 939    },
 940    {
 941      "id": "google/gemma-4-26b-a4b-it:free",
  942      "name": "Google: Gemma 4 26B A4B (free)",
 943      "cost_per_1m_in": 0,
 944      "cost_per_1m_out": 0,
 945      "cost_per_1m_in_cached": 0,
 946      "cost_per_1m_out_cached": 0,
 947      "context_window": 262144,
 948      "default_max_tokens": 16384,
 949      "can_reason": true,
 950      "reasoning_levels": [
 951        "low",
 952        "medium",
 953        "high"
 954      ],
 955      "default_reasoning_effort": "medium",
 956      "supports_attachments": true
 957    },
 958    {
 959      "id": "google/gemma-4-31b-it",
 960      "name": "Google: Gemma 4 31B",
 961      "cost_per_1m_in": 0.13,
 962      "cost_per_1m_out": 0.38,
 963      "cost_per_1m_in_cached": 0,
 964      "cost_per_1m_out_cached": 0,
 965      "context_window": 262144,
 966      "default_max_tokens": 8192,
 967      "can_reason": true,
 968      "reasoning_levels": [
 969        "low",
 970        "medium",
 971        "high"
 972      ],
 973      "default_reasoning_effort": "medium",
 974      "supports_attachments": true
 975    },
 976    {
 977      "id": "google/gemma-4-31b-it:free",
 978      "name": "Google: Gemma 4 31B (free)",
 979      "cost_per_1m_in": 0,
 980      "cost_per_1m_out": 0,
 981      "cost_per_1m_in_cached": 0,
 982      "cost_per_1m_out_cached": 0,
 983      "context_window": 262144,
 984      "default_max_tokens": 16384,
 985      "can_reason": true,
 986      "reasoning_levels": [
 987        "low",
 988        "medium",
 989        "high"
 990      ],
 991      "default_reasoning_effort": "medium",
 992      "supports_attachments": true
 993    },
 994    {
 995      "id": "ibm-granite/granite-4.1-8b",
 996      "name": "IBM: Granite 4.1 8B",
 997      "cost_per_1m_in": 0.05,
 998      "cost_per_1m_out": 0.1,
 999      "cost_per_1m_in_cached": 0,
1000      "cost_per_1m_out_cached": 0.05,
1001      "context_window": 131072,
1002      "default_max_tokens": 65536,
1003      "can_reason": false,
1004      "supports_attachments": false
1005    },
1006    {
1007      "id": "inception/mercury-2",
1008      "name": "Inception: Mercury 2",
1009      "cost_per_1m_in": 0.25,
1010      "cost_per_1m_out": 0.75,
1011      "cost_per_1m_in_cached": 0,
1012      "cost_per_1m_out_cached": 0.025,
1013      "context_window": 128000,
1014      "default_max_tokens": 25000,
1015      "can_reason": true,
1016      "reasoning_levels": [
1017        "low",
1018        "medium",
1019        "high"
1020      ],
1021      "default_reasoning_effort": "medium",
1022      "supports_attachments": false
1023    },
1024    {
1025      "id": "kwaipilot/kat-coder-pro-v2",
1026      "name": "Kwaipilot: KAT-Coder-Pro V2",
1027      "cost_per_1m_in": 0.3,
1028      "cost_per_1m_out": 1.2,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0.06,
1031      "context_window": 256000,
1032      "default_max_tokens": 40000,
1033      "can_reason": false,
1034      "supports_attachments": false
1035    },
1036    {
1037      "id": "meta-llama/llama-3.1-70b-instruct",
1038      "name": "Meta: Llama 3.1 70B Instruct",
1039      "cost_per_1m_in": 0.4,
1040      "cost_per_1m_out": 0.4,
1041      "cost_per_1m_in_cached": 0,
1042      "cost_per_1m_out_cached": 0,
1043      "context_window": 131072,
1044      "default_max_tokens": 8192,
1045      "can_reason": false,
1046      "supports_attachments": false
1047    },
1048    {
1049      "id": "meta-llama/llama-3.3-70b-instruct",
1050      "name": "Meta: Llama 3.3 70B Instruct",
1051      "cost_per_1m_in": 0.135,
1052      "cost_per_1m_out": 0.4,
1053      "cost_per_1m_in_cached": 0,
1054      "cost_per_1m_out_cached": 0,
1055      "context_window": 131072,
1056      "default_max_tokens": 60000,
1057      "can_reason": false,
1058      "supports_attachments": false
1059    },
1060    {
1061      "id": "meta-llama/llama-3.3-70b-instruct:free",
1062      "name": "Meta: Llama 3.3 70B Instruct (free)",
1063      "cost_per_1m_in": 0,
1064      "cost_per_1m_out": 0,
1065      "cost_per_1m_in_cached": 0,
1066      "cost_per_1m_out_cached": 0,
1067      "context_window": 65536,
1068      "default_max_tokens": 6553,
1069      "can_reason": false,
1070      "supports_attachments": false
1071    },
1072    {
1073      "id": "meta-llama/llama-4-scout",
1074      "name": "Meta: Llama 4 Scout",
1075      "cost_per_1m_in": 0.25,
1076      "cost_per_1m_out": 0.7,
1077      "cost_per_1m_in_cached": 0,
1078      "cost_per_1m_out_cached": 0,
1079      "context_window": 1310720,
1080      "default_max_tokens": 4096,
1081      "can_reason": false,
1082      "supports_attachments": true
1083    },
1084    {
1085      "id": "minimax/minimax-m2",
1086      "name": "MiniMax: MiniMax M2",
1087      "cost_per_1m_in": 0.255,
1088      "cost_per_1m_out": 1,
1089      "cost_per_1m_in_cached": 0,
1090      "cost_per_1m_out_cached": 0.03,
1091      "context_window": 196608,
1092      "default_max_tokens": 98304,
1093      "can_reason": true,
1094      "reasoning_levels": [
1095        "low",
1096        "medium",
1097        "high"
1098      ],
1099      "default_reasoning_effort": "medium",
1100      "supports_attachments": false
1101    },
1102    {
1103      "id": "minimax/minimax-m2.1",
1104      "name": "MiniMax: MiniMax M2.1",
1105      "cost_per_1m_in": 0.3,
1106      "cost_per_1m_out": 1.2,
1107      "cost_per_1m_in_cached": 0,
1108      "cost_per_1m_out_cached": 0.03,
1109      "context_window": 204800,
1110      "default_max_tokens": 65536,
1111      "can_reason": true,
1112      "reasoning_levels": [
1113        "low",
1114        "medium",
1115        "high"
1116      ],
1117      "default_reasoning_effort": "medium",
1118      "supports_attachments": false
1119    },
1120    {
1121      "id": "minimax/minimax-m2.5",
1122      "name": "MiniMax: MiniMax M2.5",
1123      "cost_per_1m_in": 0.3,
1124      "cost_per_1m_out": 1.2,
1125      "cost_per_1m_in_cached": 0,
1126      "cost_per_1m_out_cached": 0.03,
1127      "context_window": 204800,
1128      "default_max_tokens": 65536,
1129      "can_reason": true,
1130      "reasoning_levels": [
1131        "low",
1132        "medium",
1133        "high"
1134      ],
1135      "default_reasoning_effort": "medium",
1136      "supports_attachments": false
1137    },
1138    {
1139      "id": "minimax/minimax-m2.5:free",
1140      "name": "MiniMax: MiniMax M2.5 (free)",
1141      "cost_per_1m_in": 0,
1142      "cost_per_1m_out": 0,
1143      "cost_per_1m_in_cached": 0,
1144      "cost_per_1m_out_cached": 0,
1145      "context_window": 196608,
1146      "default_max_tokens": 4096,
1147      "can_reason": true,
1148      "reasoning_levels": [
1149        "low",
1150        "medium",
1151        "high"
1152      ],
1153      "default_reasoning_effort": "medium",
1154      "supports_attachments": false
1155    },
1156    {
1157      "id": "minimax/minimax-m2.7",
1158      "name": "MiniMax: MiniMax M2.7",
1159      "cost_per_1m_in": 0.3,
1160      "cost_per_1m_out": 1.2,
1161      "cost_per_1m_in_cached": 0,
1162      "cost_per_1m_out_cached": 0.06,
1163      "context_window": 204800,
1164      "default_max_tokens": 65536,
1165      "can_reason": true,
1166      "reasoning_levels": [
1167        "low",
1168        "medium",
1169        "high"
1170      ],
1171      "default_reasoning_effort": "medium",
1172      "supports_attachments": false
1173    },
1174    {
1175      "id": "mistralai/mistral-large",
1176      "name": "Mistral Large",
1177      "cost_per_1m_in": 2,
1178      "cost_per_1m_out": 6,
1179      "cost_per_1m_in_cached": 0,
1180      "cost_per_1m_out_cached": 0.2,
1181      "context_window": 128000,
1182      "default_max_tokens": 12800,
1183      "can_reason": false,
1184      "supports_attachments": false
1185    },
1186    {
1187      "id": "mistralai/mistral-large-2407",
1188      "name": "Mistral Large 2407",
1189      "cost_per_1m_in": 2,
1190      "cost_per_1m_out": 6,
1191      "cost_per_1m_in_cached": 0,
1192      "cost_per_1m_out_cached": 0.2,
1193      "context_window": 131072,
1194      "default_max_tokens": 13107,
1195      "can_reason": false,
1196      "supports_attachments": false
1197    },
1198    {
1199      "id": "mistralai/mistral-large-2411",
1200      "name": "Mistral Large 2411",
1201      "cost_per_1m_in": 2,
1202      "cost_per_1m_out": 6,
1203      "cost_per_1m_in_cached": 0,
1204      "cost_per_1m_out_cached": 0.2,
1205      "context_window": 131072,
1206      "default_max_tokens": 13107,
1207      "can_reason": false,
1208      "supports_attachments": false
1209    },
1210    {
1211      "id": "mistralai/codestral-2508",
1212      "name": "Mistral: Codestral 2508",
1213      "cost_per_1m_in": 0.3,
1214      "cost_per_1m_out": 0.9,
1215      "cost_per_1m_in_cached": 0,
1216      "cost_per_1m_out_cached": 0.03,
1217      "context_window": 256000,
1218      "default_max_tokens": 25600,
1219      "can_reason": false,
1220      "supports_attachments": false
1221    },
1222    {
1223      "id": "mistralai/devstral-2512",
1224      "name": "Mistral: Devstral 2 2512",
1225      "cost_per_1m_in": 0.4,
1226      "cost_per_1m_out": 2,
1227      "cost_per_1m_in_cached": 0,
1228      "cost_per_1m_out_cached": 0.04,
1229      "context_window": 262144,
1230      "default_max_tokens": 26214,
1231      "can_reason": false,
1232      "supports_attachments": false
1233    },
1234    {
1235      "id": "mistralai/devstral-medium",
1236      "name": "Mistral: Devstral Medium",
1237      "cost_per_1m_in": 0.4,
1238      "cost_per_1m_out": 2,
1239      "cost_per_1m_in_cached": 0,
1240      "cost_per_1m_out_cached": 0.04,
1241      "context_window": 131072,
1242      "default_max_tokens": 13107,
1243      "can_reason": false,
1244      "supports_attachments": false
1245    },
1246    {
1247      "id": "mistralai/devstral-small",
1248      "name": "Mistral: Devstral Small 1.1",
1249      "cost_per_1m_in": 0.1,
1250      "cost_per_1m_out": 0.3,
1251      "cost_per_1m_in_cached": 0,
1252      "cost_per_1m_out_cached": 0.01,
1253      "context_window": 131072,
1254      "default_max_tokens": 13107,
1255      "can_reason": false,
1256      "supports_attachments": false
1257    },
1258    {
1259      "id": "mistralai/ministral-14b-2512",
1260      "name": "Mistral: Ministral 3 14B 2512",
1261      "cost_per_1m_in": 0.35,
1262      "cost_per_1m_out": 0.35,
1263      "cost_per_1m_in_cached": 0,
1264      "cost_per_1m_out_cached": 0,
1265      "context_window": 262144,
1266      "default_max_tokens": 131072,
1267      "can_reason": false,
1268      "supports_attachments": true
1269    },
1270    {
1271      "id": "mistralai/ministral-3b-2512",
1272      "name": "Mistral: Ministral 3 3B 2512",
1273      "cost_per_1m_in": 0.1,
1274      "cost_per_1m_out": 0.1,
1275      "cost_per_1m_in_cached": 0,
1276      "cost_per_1m_out_cached": 0.01,
1277      "context_window": 131072,
1278      "default_max_tokens": 13107,
1279      "can_reason": false,
1280      "supports_attachments": true
1281    },
1282    {
1283      "id": "mistralai/ministral-8b-2512",
1284      "name": "Mistral: Ministral 3 8B 2512",
1285      "cost_per_1m_in": 0.3,
1286      "cost_per_1m_out": 0.3,
1287      "cost_per_1m_in_cached": 0,
1288      "cost_per_1m_out_cached": 0,
1289      "context_window": 262144,
1290      "default_max_tokens": 131072,
1291      "can_reason": false,
1292      "supports_attachments": true
1293    },
1294    {
1295      "id": "mistralai/mistral-large-2512",
1296      "name": "Mistral: Mistral Large 3 2512",
1297      "cost_per_1m_in": 0.5,
1298      "cost_per_1m_out": 1.5,
1299      "cost_per_1m_in_cached": 0,
1300      "cost_per_1m_out_cached": 0.05,
1301      "context_window": 262144,
1302      "default_max_tokens": 26214,
1303      "can_reason": false,
1304      "supports_attachments": true
1305    },
1306    {
1307      "id": "mistralai/mistral-medium-3",
1308      "name": "Mistral: Mistral Medium 3",
1309      "cost_per_1m_in": 0.4,
1310      "cost_per_1m_out": 2,
1311      "cost_per_1m_in_cached": 0,
1312      "cost_per_1m_out_cached": 0.04,
1313      "context_window": 131072,
1314      "default_max_tokens": 13107,
1315      "can_reason": false,
1316      "supports_attachments": true
1317    },
1318    {
1319      "id": "mistralai/mistral-medium-3.1",
1320      "name": "Mistral: Mistral Medium 3.1",
1321      "cost_per_1m_in": 0.4,
1322      "cost_per_1m_out": 2,
1323      "cost_per_1m_in_cached": 0,
1324      "cost_per_1m_out_cached": 0.04,
1325      "context_window": 131072,
1326      "default_max_tokens": 13107,
1327      "can_reason": false,
1328      "supports_attachments": true
1329    },
1330    {
1331      "id": "mistralai/mistral-nemo",
1332      "name": "Mistral: Mistral Nemo",
1333      "cost_per_1m_in": 0.15,
1334      "cost_per_1m_out": 0.15,
1335      "cost_per_1m_in_cached": 0,
1336      "cost_per_1m_out_cached": 0.015,
1337      "context_window": 131072,
1338      "default_max_tokens": 13107,
1339      "can_reason": false,
1340      "supports_attachments": false
1341    },
1342    {
1343      "id": "mistralai/mistral-small-3.2-24b-instruct",
1344      "name": "Mistral: Mistral Small 3.2 24B",
1345      "cost_per_1m_in": 0.09375,
1346      "cost_per_1m_out": 0.25,
1347      "cost_per_1m_in_cached": 0,
1348      "cost_per_1m_out_cached": 0,
1349      "context_window": 256000,
1350      "default_max_tokens": 8192,
1351      "can_reason": false,
1352      "supports_attachments": true
1353    },
1354    {
1355      "id": "mistralai/mistral-small-2603",
1356      "name": "Mistral: Mistral Small 4",
1357      "cost_per_1m_in": 0.15,
1358      "cost_per_1m_out": 0.6,
1359      "cost_per_1m_in_cached": 0,
1360      "cost_per_1m_out_cached": 0.015,
1361      "context_window": 262144,
1362      "default_max_tokens": 26214,
1363      "can_reason": true,
1364      "reasoning_levels": [
1365        "low",
1366        "medium",
1367        "high"
1368      ],
1369      "default_reasoning_effort": "medium",
1370      "supports_attachments": true
1371    },
1372    {
1373      "id": "mistralai/mixtral-8x22b-instruct",
1374      "name": "Mistral: Mixtral 8x22B Instruct",
1375      "cost_per_1m_in": 2,
1376      "cost_per_1m_out": 6,
1377      "cost_per_1m_in_cached": 0,
1378      "cost_per_1m_out_cached": 0.2,
1379      "context_window": 65536,
1380      "default_max_tokens": 6553,
1381      "can_reason": false,
1382      "supports_attachments": false
1383    },
1384    {
1385      "id": "mistralai/mixtral-8x7b-instruct",
1386      "name": "Mistral: Mixtral 8x7B Instruct",
1387      "cost_per_1m_in": 0.54,
1388      "cost_per_1m_out": 0.54,
1389      "cost_per_1m_in_cached": 0,
1390      "cost_per_1m_out_cached": 0,
1391      "context_window": 32768,
1392      "default_max_tokens": 8192,
1393      "can_reason": false,
1394      "supports_attachments": false
1395    },
1396    {
1397      "id": "mistralai/pixtral-large-2411",
1398      "name": "Mistral: Pixtral Large 2411",
1399      "cost_per_1m_in": 2,
1400      "cost_per_1m_out": 6,
1401      "cost_per_1m_in_cached": 0,
1402      "cost_per_1m_out_cached": 0.2,
1403      "context_window": 131072,
1404      "default_max_tokens": 13107,
1405      "can_reason": false,
1406      "supports_attachments": true
1407    },
1408    {
1409      "id": "mistralai/mistral-saba",
1410      "name": "Mistral: Saba",
1411      "cost_per_1m_in": 0.2,
1412      "cost_per_1m_out": 0.6,
1413      "cost_per_1m_in_cached": 0,
1414      "cost_per_1m_out_cached": 0.02,
1415      "context_window": 32768,
1416      "default_max_tokens": 3276,
1417      "can_reason": false,
1418      "supports_attachments": false
1419    },
1420    {
1421      "id": "mistralai/voxtral-small-24b-2507",
1422      "name": "Mistral: Voxtral Small 24B 2507",
1423      "cost_per_1m_in": 0.1,
1424      "cost_per_1m_out": 0.3,
1425      "cost_per_1m_in_cached": 0,
1426      "cost_per_1m_out_cached": 0.01,
1427      "context_window": 32000,
1428      "default_max_tokens": 3200,
1429      "can_reason": false,
1430      "supports_attachments": false
1431    },
1432    {
1433      "id": "moonshotai/kimi-k2",
1434      "name": "MoonshotAI: Kimi K2 0711",
1435      "cost_per_1m_in": 0.57,
1436      "cost_per_1m_out": 2.3,
1437      "cost_per_1m_in_cached": 0,
1438      "cost_per_1m_out_cached": 0,
1439      "context_window": 131072,
1440      "default_max_tokens": 16384,
1441      "can_reason": false,
1442      "supports_attachments": false
1443    },
1444    {
1445      "id": "moonshotai/kimi-k2-0905",
1446      "name": "MoonshotAI: Kimi K2 0905",
1447      "cost_per_1m_in": 0.6,
1448      "cost_per_1m_out": 2.5,
1449      "cost_per_1m_in_cached": 0,
1450      "cost_per_1m_out_cached": 0,
1451      "context_window": 262144,
1452      "default_max_tokens": 131072,
1453      "can_reason": false,
1454      "supports_attachments": false
1455    },
1456    {
1457      "id": "moonshotai/kimi-k2-thinking",
1458      "name": "MoonshotAI: Kimi K2 Thinking",
1459      "cost_per_1m_in": 0.6,
1460      "cost_per_1m_out": 2.5,
1461      "cost_per_1m_in_cached": 0,
1462      "cost_per_1m_out_cached": 0.15,
1463      "context_window": 262144,
1464      "default_max_tokens": 131072,
1465      "can_reason": true,
1466      "reasoning_levels": [
1467        "low",
1468        "medium",
1469        "high"
1470      ],
1471      "default_reasoning_effort": "medium",
1472      "supports_attachments": false
1473    },
1474    {
1475      "id": "moonshotai/kimi-k2.5",
1476      "name": "MoonshotAI: Kimi K2.5",
1477      "cost_per_1m_in": 0.55,
1478      "cost_per_1m_out": 2.75,
1479      "cost_per_1m_in_cached": 0,
1480      "cost_per_1m_out_cached": 0.1,
1481      "context_window": 262144,
1482      "default_max_tokens": 131072,
1483      "can_reason": true,
1484      "reasoning_levels": [
1485        "low",
1486        "medium",
1487        "high"
1488      ],
1489      "default_reasoning_effort": "medium",
1490      "supports_attachments": true
1491    },
1492    {
1493      "id": "moonshotai/kimi-k2.6",
1494      "name": "MoonshotAI: Kimi K2.6",
1495      "cost_per_1m_in": 0.95,
1496      "cost_per_1m_out": 4,
1497      "cost_per_1m_in_cached": 0,
1498      "cost_per_1m_out_cached": 0.16,
1499      "context_window": 262144,
1500      "default_max_tokens": 26214,
1501      "can_reason": true,
1502      "reasoning_levels": [
1503        "low",
1504        "medium",
1505        "high"
1506      ],
1507      "default_reasoning_effort": "medium",
1508      "supports_attachments": true
1509    },
1510    {
1511      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1512      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1513      "cost_per_1m_in": 1.2,
1514      "cost_per_1m_out": 1.2,
1515      "cost_per_1m_in_cached": 0,
1516      "cost_per_1m_out_cached": 0,
1517      "context_window": 131072,
1518      "default_max_tokens": 8192,
1519      "can_reason": false,
1520      "supports_attachments": false
1521    },
1522    {
1523      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1524      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1525      "cost_per_1m_in": 0.1,
1526      "cost_per_1m_out": 0.4,
1527      "cost_per_1m_in_cached": 0,
1528      "cost_per_1m_out_cached": 0,
1529      "context_window": 131072,
1530      "default_max_tokens": 8192,
1531      "can_reason": true,
1532      "reasoning_levels": [
1533        "low",
1534        "medium",
1535        "high"
1536      ],
1537      "default_reasoning_effort": "medium",
1538      "supports_attachments": false
1539    },
1540    {
1541      "id": "nvidia/nemotron-3-nano-30b-a3b",
1542      "name": "NVIDIA: Nemotron 3 Nano 30B A3B",
1543      "cost_per_1m_in": 0.05,
1544      "cost_per_1m_out": 0.2,
1545      "cost_per_1m_in_cached": 0,
1546      "cost_per_1m_out_cached": 0,
1547      "context_window": 262144,
1548      "default_max_tokens": 114000,
1549      "can_reason": true,
1550      "reasoning_levels": [
1551        "low",
1552        "medium",
1553        "high"
1554      ],
1555      "default_reasoning_effort": "medium",
1556      "supports_attachments": false
1557    },
1558    {
1559      "id": "nvidia/nemotron-3-nano-30b-a3b:free",
1560      "name": "NVIDIA: Nemotron 3 Nano 30B A3B (free)",
1561      "cost_per_1m_in": 0,
1562      "cost_per_1m_out": 0,
1563      "cost_per_1m_in_cached": 0,
1564      "cost_per_1m_out_cached": 0,
1565      "context_window": 256000,
1566      "default_max_tokens": 25600,
1567      "can_reason": true,
1568      "reasoning_levels": [
1569        "low",
1570        "medium",
1571        "high"
1572      ],
1573      "default_reasoning_effort": "medium",
1574      "supports_attachments": false
1575    },
1576    {
1577      "id": "nvidia/nemotron-3-nano-omni-30b-a3b-reasoning:free",
1578      "name": "NVIDIA: Nemotron 3 Nano Omni (free)",
1579      "cost_per_1m_in": 0,
1580      "cost_per_1m_out": 0,
1581      "cost_per_1m_in_cached": 0,
1582      "cost_per_1m_out_cached": 0,
1583      "context_window": 256000,
1584      "default_max_tokens": 32768,
1585      "can_reason": true,
1586      "reasoning_levels": [
1587        "low",
1588        "medium",
1589        "high"
1590      ],
1591      "default_reasoning_effort": "medium",
1592      "supports_attachments": true
1593    },
1594    {
1595      "id": "nvidia/nemotron-3-super-120b-a12b",
1596      "name": "NVIDIA: Nemotron 3 Super",
1597      "cost_per_1m_in": 0.1,
1598      "cost_per_1m_out": 0.5,
1599      "cost_per_1m_in_cached": 0,
1600      "cost_per_1m_out_cached": 0,
1601      "context_window": 262144,
1602      "default_max_tokens": 8192,
1603      "can_reason": true,
1604      "reasoning_levels": [
1605        "low",
1606        "medium",
1607        "high"
1608      ],
1609      "default_reasoning_effort": "medium",
1610      "supports_attachments": false
1611    },
1612    {
1613      "id": "nvidia/nemotron-3-super-120b-a12b:free",
1614      "name": "NVIDIA: Nemotron 3 Super (free)",
1615      "cost_per_1m_in": 0,
1616      "cost_per_1m_out": 0,
1617      "cost_per_1m_in_cached": 0,
1618      "cost_per_1m_out_cached": 0,
1619      "context_window": 262144,
1620      "default_max_tokens": 131072,
1621      "can_reason": true,
1622      "reasoning_levels": [
1623        "low",
1624        "medium",
1625        "high"
1626      ],
1627      "default_reasoning_effort": "medium",
1628      "supports_attachments": false
1629    },
1630    {
1631      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1632      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1633      "cost_per_1m_in": 0,
1634      "cost_per_1m_out": 0,
1635      "cost_per_1m_in_cached": 0,
1636      "cost_per_1m_out_cached": 0,
1637      "context_window": 128000,
1638      "default_max_tokens": 64000,
1639      "can_reason": true,
1640      "reasoning_levels": [
1641        "low",
1642        "medium",
1643        "high"
1644      ],
1645      "default_reasoning_effort": "medium",
1646      "supports_attachments": true
1647    },
1648    {
1649      "id": "nvidia/nemotron-nano-9b-v2",
1650      "name": "NVIDIA: Nemotron Nano 9B V2",
1651      "cost_per_1m_in": 0.04,
1652      "cost_per_1m_out": 0.16,
1653      "cost_per_1m_in_cached": 0,
1654      "cost_per_1m_out_cached": 0,
1655      "context_window": 131072,
1656      "default_max_tokens": 8192,
1657      "can_reason": true,
1658      "reasoning_levels": [
1659        "low",
1660        "medium",
1661        "high"
1662      ],
1663      "default_reasoning_effort": "medium",
1664      "supports_attachments": false
1665    },
1666    {
1667      "id": "nvidia/nemotron-nano-9b-v2:free",
1668      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1669      "cost_per_1m_in": 0,
1670      "cost_per_1m_out": 0,
1671      "cost_per_1m_in_cached": 0,
1672      "cost_per_1m_out_cached": 0,
1673      "context_window": 128000,
1674      "default_max_tokens": 12800,
1675      "can_reason": true,
1676      "reasoning_levels": [
1677        "low",
1678        "medium",
1679        "high"
1680      ],
1681      "default_reasoning_effort": "medium",
1682      "supports_attachments": false
1683    },
1684    {
1685      "id": "nex-agi/deepseek-v3.1-nex-n1",
1686      "name": "Nex AGI: DeepSeek V3.1 Nex N1",
1687      "cost_per_1m_in": 0.135,
1688      "cost_per_1m_out": 0.5,
1689      "cost_per_1m_in_cached": 0,
1690      "cost_per_1m_out_cached": 0,
1691      "context_window": 131072,
1692      "default_max_tokens": 81920,
1693      "can_reason": false,
1694      "supports_attachments": false
1695    },
1696    {
1697      "id": "openai/gpt-audio",
1698      "name": "OpenAI: GPT Audio",
1699      "cost_per_1m_in": 2.5,
1700      "cost_per_1m_out": 10,
1701      "cost_per_1m_in_cached": 0,
1702      "cost_per_1m_out_cached": 0,
1703      "context_window": 128000,
1704      "default_max_tokens": 8192,
1705      "can_reason": false,
1706      "supports_attachments": false
1707    },
1708    {
1709      "id": "openai/gpt-audio-mini",
1710      "name": "OpenAI: GPT Audio Mini",
1711      "cost_per_1m_in": 0.6,
1712      "cost_per_1m_out": 2.4,
1713      "cost_per_1m_in_cached": 0,
1714      "cost_per_1m_out_cached": 0,
1715      "context_window": 128000,
1716      "default_max_tokens": 8192,
1717      "can_reason": false,
1718      "supports_attachments": false
1719    },
1720    {
1721      "id": "openai/gpt-4-turbo",
1722      "name": "OpenAI: GPT-4 Turbo",
1723      "cost_per_1m_in": 10,
1724      "cost_per_1m_out": 30,
1725      "cost_per_1m_in_cached": 0,
1726      "cost_per_1m_out_cached": 0,
1727      "context_window": 128000,
1728      "default_max_tokens": 2048,
1729      "can_reason": false,
1730      "supports_attachments": true
1731    },
1732    {
1733      "id": "openai/gpt-4-1106-preview",
1734      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1735      "cost_per_1m_in": 10,
1736      "cost_per_1m_out": 30,
1737      "cost_per_1m_in_cached": 0,
1738      "cost_per_1m_out_cached": 0,
1739      "context_window": 128000,
1740      "default_max_tokens": 2048,
1741      "can_reason": false,
1742      "supports_attachments": false
1743    },
1744    {
1745      "id": "openai/gpt-4-turbo-preview",
1746      "name": "OpenAI: GPT-4 Turbo Preview",
1747      "cost_per_1m_in": 10,
1748      "cost_per_1m_out": 30,
1749      "cost_per_1m_in_cached": 0,
1750      "cost_per_1m_out_cached": 0,
1751      "context_window": 128000,
1752      "default_max_tokens": 2048,
1753      "can_reason": false,
1754      "supports_attachments": false
1755    },
1756    {
1757      "id": "openai/gpt-4.1",
1758      "name": "OpenAI: GPT-4.1",
1759      "cost_per_1m_in": 2,
1760      "cost_per_1m_out": 8,
1761      "cost_per_1m_in_cached": 0,
1762      "cost_per_1m_out_cached": 0.5,
1763      "context_window": 1047576,
1764      "default_max_tokens": 104757,
1765      "can_reason": false,
1766      "supports_attachments": true
1767    },
1768    {
1769      "id": "openai/gpt-4.1-mini",
1770      "name": "OpenAI: GPT-4.1 Mini",
1771      "cost_per_1m_in": 0.4,
1772      "cost_per_1m_out": 1.6,
1773      "cost_per_1m_in_cached": 0,
1774      "cost_per_1m_out_cached": 0.1,
1775      "context_window": 1047576,
1776      "default_max_tokens": 104757,
1777      "can_reason": false,
1778      "supports_attachments": true
1779    },
1780    {
1781      "id": "openai/gpt-4.1-nano",
1782      "name": "OpenAI: GPT-4.1 Nano",
1783      "cost_per_1m_in": 0.1,
1784      "cost_per_1m_out": 0.4,
1785      "cost_per_1m_in_cached": 0,
1786      "cost_per_1m_out_cached": 0.03,
1787      "context_window": 1047576,
1788      "default_max_tokens": 104757,
1789      "can_reason": false,
1790      "supports_attachments": true
1791    },
1792    {
1793      "id": "openai/gpt-4o",
1794      "name": "OpenAI: GPT-4o",
1795      "cost_per_1m_in": 2.5,
1796      "cost_per_1m_out": 10,
1797      "cost_per_1m_in_cached": 0,
1798      "cost_per_1m_out_cached": 1.25,
1799      "context_window": 128000,
1800      "default_max_tokens": 8192,
1801      "can_reason": false,
1802      "supports_attachments": true
1803    },
1804    {
1805      "id": "openai/gpt-4o-2024-05-13",
1806      "name": "OpenAI: GPT-4o (2024-05-13)",
1807      "cost_per_1m_in": 5,
1808      "cost_per_1m_out": 15,
1809      "cost_per_1m_in_cached": 0,
1810      "cost_per_1m_out_cached": 0,
1811      "context_window": 128000,
1812      "default_max_tokens": 2048,
1813      "can_reason": false,
1814      "supports_attachments": true
1815    },
1816    {
1817      "id": "openai/gpt-4o-2024-08-06",
1818      "name": "OpenAI: GPT-4o (2024-08-06)",
1819      "cost_per_1m_in": 2.5,
1820      "cost_per_1m_out": 10,
1821      "cost_per_1m_in_cached": 0,
1822      "cost_per_1m_out_cached": 1.25,
1823      "context_window": 128000,
1824      "default_max_tokens": 8192,
1825      "can_reason": false,
1826      "supports_attachments": true
1827    },
1828    {
1829      "id": "openai/gpt-4o-2024-11-20",
1830      "name": "OpenAI: GPT-4o (2024-11-20)",
1831      "cost_per_1m_in": 2.5,
1832      "cost_per_1m_out": 10,
1833      "cost_per_1m_in_cached": 0,
1834      "cost_per_1m_out_cached": 1.25,
1835      "context_window": 128000,
1836      "default_max_tokens": 8192,
1837      "can_reason": false,
1838      "supports_attachments": true
1839    },
1840    {
1841      "id": "openai/gpt-4o-audio-preview",
1842      "name": "OpenAI: GPT-4o Audio",
1843      "cost_per_1m_in": 2.5,
1844      "cost_per_1m_out": 10,
1845      "cost_per_1m_in_cached": 0,
1846      "cost_per_1m_out_cached": 0,
1847      "context_window": 128000,
1848      "default_max_tokens": 8192,
1849      "can_reason": false,
1850      "supports_attachments": false
1851    },
1852    {
1853      "id": "openai/gpt-4o-mini",
1854      "name": "OpenAI: GPT-4o-mini",
1855      "cost_per_1m_in": 0.15,
1856      "cost_per_1m_out": 0.6,
1857      "cost_per_1m_in_cached": 0,
1858      "cost_per_1m_out_cached": 0.075,
1859      "context_window": 128000,
1860      "default_max_tokens": 8192,
1861      "can_reason": false,
1862      "supports_attachments": true
1863    },
1864    {
1865      "id": "openai/gpt-4o-mini-2024-07-18",
1866      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1867      "cost_per_1m_in": 0.15,
1868      "cost_per_1m_out": 0.6,
1869      "cost_per_1m_in_cached": 0,
1870      "cost_per_1m_out_cached": 0.075,
1871      "context_window": 128000,
1872      "default_max_tokens": 8192,
1873      "can_reason": false,
1874      "supports_attachments": true
1875    },
1876    {
1877      "id": "openai/gpt-5",
1878      "name": "OpenAI: GPT-5",
1879      "cost_per_1m_in": 1.25,
1880      "cost_per_1m_out": 10,
1881      "cost_per_1m_in_cached": 0,
1882      "cost_per_1m_out_cached": 0.125,
1883      "context_window": 400000,
1884      "default_max_tokens": 64000,
1885      "can_reason": true,
1886      "reasoning_levels": [
1887        "low",
1888        "medium",
1889        "high"
1890      ],
1891      "default_reasoning_effort": "medium",
1892      "supports_attachments": true
1893    },
1894    {
1895      "id": "openai/gpt-5-codex",
1896      "name": "OpenAI: GPT-5 Codex",
1897      "cost_per_1m_in": 1.25,
1898      "cost_per_1m_out": 10,
1899      "cost_per_1m_in_cached": 0,
1900      "cost_per_1m_out_cached": 0.125,
1901      "context_window": 400000,
1902      "default_max_tokens": 64000,
1903      "can_reason": true,
1904      "reasoning_levels": [
1905        "low",
1906        "medium",
1907        "high"
1908      ],
1909      "default_reasoning_effort": "medium",
1910      "supports_attachments": true
1911    },
1912    {
1913      "id": "openai/gpt-5-mini",
1914      "name": "OpenAI: GPT-5 Mini",
1915      "cost_per_1m_in": 0.25,
1916      "cost_per_1m_out": 2,
1917      "cost_per_1m_in_cached": 0,
1918      "cost_per_1m_out_cached": 0.03,
1919      "context_window": 400000,
1920      "default_max_tokens": 40000,
1921      "can_reason": true,
1922      "reasoning_levels": [
1923        "low",
1924        "medium",
1925        "high"
1926      ],
1927      "default_reasoning_effort": "medium",
1928      "supports_attachments": true
1929    },
1930    {
1931      "id": "openai/gpt-5-nano",
1932      "name": "OpenAI: GPT-5 Nano",
1933      "cost_per_1m_in": 0.05,
1934      "cost_per_1m_out": 0.4,
1935      "cost_per_1m_in_cached": 0,
1936      "cost_per_1m_out_cached": 0.005,
1937      "context_window": 400000,
1938      "default_max_tokens": 64000,
1939      "can_reason": true,
1940      "reasoning_levels": [
1941        "low",
1942        "medium",
1943        "high"
1944      ],
1945      "default_reasoning_effort": "medium",
1946      "supports_attachments": true
1947    },
1948    {
1949      "id": "openai/gpt-5-pro",
1950      "name": "OpenAI: GPT-5 Pro",
1951      "cost_per_1m_in": 15,
1952      "cost_per_1m_out": 120,
1953      "cost_per_1m_in_cached": 0,
1954      "cost_per_1m_out_cached": 0,
1955      "context_window": 400000,
1956      "default_max_tokens": 64000,
1957      "can_reason": true,
1958      "reasoning_levels": [
1959        "low",
1960        "medium",
1961        "high"
1962      ],
1963      "default_reasoning_effort": "medium",
1964      "supports_attachments": true
1965    },
1966    {
1967      "id": "openai/gpt-5.1",
1968      "name": "OpenAI: GPT-5.1",
1969      "cost_per_1m_in": 1.25,
1970      "cost_per_1m_out": 10,
1971      "cost_per_1m_in_cached": 0,
1972      "cost_per_1m_out_cached": 0.125,
1973      "context_window": 400000,
1974      "default_max_tokens": 64000,
1975      "can_reason": true,
1976      "reasoning_levels": [
1977        "low",
1978        "medium",
1979        "high"
1980      ],
1981      "default_reasoning_effort": "medium",
1982      "supports_attachments": true
1983    },
1984    {
1985      "id": "openai/gpt-5.1-chat",
1986      "name": "OpenAI: GPT-5.1 Chat",
1987      "cost_per_1m_in": 1.25,
1988      "cost_per_1m_out": 10,
1989      "cost_per_1m_in_cached": 0,
1990      "cost_per_1m_out_cached": 0.125,
1991      "context_window": 128000,
1992      "default_max_tokens": 8192,
1993      "can_reason": false,
1994      "supports_attachments": true
1995    },
1996    {
1997      "id": "openai/gpt-5.1-codex",
1998      "name": "OpenAI: GPT-5.1-Codex",
1999      "cost_per_1m_in": 1.25,
2000      "cost_per_1m_out": 10,
2001      "cost_per_1m_in_cached": 0,
2002      "cost_per_1m_out_cached": 0.125,
2003      "context_window": 400000,
2004      "default_max_tokens": 64000,
2005      "can_reason": true,
2006      "reasoning_levels": [
2007        "low",
2008        "medium",
2009        "high"
2010      ],
2011      "default_reasoning_effort": "medium",
2012      "supports_attachments": true
2013    },
2014    {
2015      "id": "openai/gpt-5.1-codex-max",
2016      "name": "OpenAI: GPT-5.1-Codex-Max",
2017      "cost_per_1m_in": 1.25,
2018      "cost_per_1m_out": 10,
2019      "cost_per_1m_in_cached": 0,
2020      "cost_per_1m_out_cached": 0.125,
2021      "context_window": 400000,
2022      "default_max_tokens": 64000,
2023      "can_reason": true,
2024      "reasoning_levels": [
2025        "low",
2026        "medium",
2027        "high"
2028      ],
2029      "default_reasoning_effort": "medium",
2030      "supports_attachments": true
2031    },
2032    {
2033      "id": "openai/gpt-5.1-codex-mini",
2034      "name": "OpenAI: GPT-5.1-Codex-Mini",
2035      "cost_per_1m_in": 0.25,
2036      "cost_per_1m_out": 2,
2037      "cost_per_1m_in_cached": 0,
2038      "cost_per_1m_out_cached": 0.025,
2039      "context_window": 400000,
2040      "default_max_tokens": 50000,
2041      "can_reason": true,
2042      "reasoning_levels": [
2043        "low",
2044        "medium",
2045        "high"
2046      ],
2047      "default_reasoning_effort": "medium",
2048      "supports_attachments": true
2049    },
2050    {
2051      "id": "openai/gpt-5.2",
2052      "name": "OpenAI: GPT-5.2",
2053      "cost_per_1m_in": 1.75,
2054      "cost_per_1m_out": 14,
2055      "cost_per_1m_in_cached": 0,
2056      "cost_per_1m_out_cached": 0.175,
2057      "context_window": 400000,
2058      "default_max_tokens": 64000,
2059      "can_reason": true,
2060      "reasoning_levels": [
2061        "low",
2062        "medium",
2063        "high"
2064      ],
2065      "default_reasoning_effort": "medium",
2066      "supports_attachments": true
2067    },
2068    {
2069      "id": "openai/gpt-5.2-chat",
2070      "name": "OpenAI: GPT-5.2 Chat",
2071      "cost_per_1m_in": 1.75,
2072      "cost_per_1m_out": 14,
2073      "cost_per_1m_in_cached": 0,
2074      "cost_per_1m_out_cached": 0.175,
2075      "context_window": 128000,
2076      "default_max_tokens": 8192,
2077      "can_reason": false,
2078      "supports_attachments": true
2079    },
2080    {
2081      "id": "openai/gpt-5.2-pro",
2082      "name": "OpenAI: GPT-5.2 Pro",
2083      "cost_per_1m_in": 21,
2084      "cost_per_1m_out": 168,
2085      "cost_per_1m_in_cached": 0,
2086      "cost_per_1m_out_cached": 0,
2087      "context_window": 400000,
2088      "default_max_tokens": 64000,
2089      "can_reason": true,
2090      "reasoning_levels": [
2091        "low",
2092        "medium",
2093        "high"
2094      ],
2095      "default_reasoning_effort": "medium",
2096      "supports_attachments": true
2097    },
2098    {
2099      "id": "openai/gpt-5.2-codex",
2100      "name": "OpenAI: GPT-5.2-Codex",
2101      "cost_per_1m_in": 1.75,
2102      "cost_per_1m_out": 14,
2103      "cost_per_1m_in_cached": 0,
2104      "cost_per_1m_out_cached": 0.175,
2105      "context_window": 400000,
2106      "default_max_tokens": 64000,
2107      "can_reason": true,
2108      "reasoning_levels": [
2109        "low",
2110        "medium",
2111        "high"
2112      ],
2113      "default_reasoning_effort": "medium",
2114      "supports_attachments": true
2115    },
2116    {
2117      "id": "openai/gpt-5.3-chat",
2118      "name": "OpenAI: GPT-5.3 Chat",
2119      "cost_per_1m_in": 1.75,
2120      "cost_per_1m_out": 14,
2121      "cost_per_1m_in_cached": 0,
2122      "cost_per_1m_out_cached": 0.175,
2123      "context_window": 128000,
2124      "default_max_tokens": 8192,
2125      "can_reason": false,
2126      "supports_attachments": true
2127    },
2128    {
2129      "id": "openai/gpt-5.3-codex",
2130      "name": "OpenAI: GPT-5.3-Codex",
2131      "cost_per_1m_in": 1.75,
2132      "cost_per_1m_out": 14,
2133      "cost_per_1m_in_cached": 0,
2134      "cost_per_1m_out_cached": 0.175,
2135      "context_window": 400000,
2136      "default_max_tokens": 64000,
2137      "can_reason": true,
2138      "reasoning_levels": [
2139        "low",
2140        "medium",
2141        "high"
2142      ],
2143      "default_reasoning_effort": "medium",
2144      "supports_attachments": true
2145    },
2146    {
2147      "id": "openai/gpt-5.4",
2148      "name": "OpenAI: GPT-5.4",
2149      "cost_per_1m_in": 2.5,
2150      "cost_per_1m_out": 15,
2151      "cost_per_1m_in_cached": 0,
2152      "cost_per_1m_out_cached": 0.25,
2153      "context_window": 1050000,
2154      "default_max_tokens": 64000,
2155      "can_reason": true,
2156      "reasoning_levels": [
2157        "low",
2158        "medium",
2159        "high"
2160      ],
2161      "default_reasoning_effort": "medium",
2162      "supports_attachments": true
2163    },
2164    {
2165      "id": "openai/gpt-5.4-mini",
2166      "name": "OpenAI: GPT-5.4 Mini",
2167      "cost_per_1m_in": 0.75,
2168      "cost_per_1m_out": 4.5,
2169      "cost_per_1m_in_cached": 0,
2170      "cost_per_1m_out_cached": 0.075,
2171      "context_window": 400000,
2172      "default_max_tokens": 64000,
2173      "can_reason": true,
2174      "reasoning_levels": [
2175        "low",
2176        "medium",
2177        "high"
2178      ],
2179      "default_reasoning_effort": "medium",
2180      "supports_attachments": true
2181    },
2182    {
2183      "id": "openai/gpt-5.4-nano",
2184      "name": "OpenAI: GPT-5.4 Nano",
2185      "cost_per_1m_in": 0.2,
2186      "cost_per_1m_out": 1.25,
2187      "cost_per_1m_in_cached": 0,
2188      "cost_per_1m_out_cached": 0.02,
2189      "context_window": 400000,
2190      "default_max_tokens": 64000,
2191      "can_reason": true,
2192      "reasoning_levels": [
2193        "low",
2194        "medium",
2195        "high"
2196      ],
2197      "default_reasoning_effort": "medium",
2198      "supports_attachments": true
2199    },
2200    {
2201      "id": "openai/gpt-5.4-pro",
2202      "name": "OpenAI: GPT-5.4 Pro",
2203      "cost_per_1m_in": 30,
2204      "cost_per_1m_out": 180,
2205      "cost_per_1m_in_cached": 0,
2206      "cost_per_1m_out_cached": 0,
2207      "context_window": 1050000,
2208      "default_max_tokens": 64000,
2209      "can_reason": true,
2210      "reasoning_levels": [
2211        "low",
2212        "medium",
2213        "high"
2214      ],
2215      "default_reasoning_effort": "medium",
2216      "supports_attachments": true
2217    },
2218    {
2219      "id": "openai/gpt-5.5",
2220      "name": "OpenAI: GPT-5.5",
2221      "cost_per_1m_in": 5,
2222      "cost_per_1m_out": 30,
2223      "cost_per_1m_in_cached": 0,
2224      "cost_per_1m_out_cached": 0.5,
2225      "context_window": 1050000,
2226      "default_max_tokens": 64000,
2227      "can_reason": true,
2228      "reasoning_levels": [
2229        "low",
2230        "medium",
2231        "high"
2232      ],
2233      "default_reasoning_effort": "medium",
2234      "supports_attachments": true
2235    },
2236    {
2237      "id": "openai/gpt-5.5-pro",
2238      "name": "OpenAI: GPT-5.5 Pro",
2239      "cost_per_1m_in": 30,
2240      "cost_per_1m_out": 180,
2241      "cost_per_1m_in_cached": 0,
2242      "cost_per_1m_out_cached": 0,
2243      "context_window": 1050000,
2244      "default_max_tokens": 64000,
2245      "can_reason": true,
2246      "reasoning_levels": [
2247        "low",
2248        "medium",
2249        "high"
2250      ],
2251      "default_reasoning_effort": "medium",
2252      "supports_attachments": true
2253    },
2254    {
2255      "id": "openai/gpt-oss-120b",
2256      "name": "OpenAI: gpt-oss-120b",
2257      "cost_per_1m_in": 0.05,
2258      "cost_per_1m_out": 0.25,
2259      "cost_per_1m_in_cached": 0,
2260      "cost_per_1m_out_cached": 0,
2261      "context_window": 131072,
2262      "default_max_tokens": 16384,
2263      "can_reason": true,
2264      "reasoning_levels": [
2265        "low",
2266        "medium",
2267        "high"
2268      ],
2269      "default_reasoning_effort": "medium",
2270      "supports_attachments": false
2271    },
2272    {
2273      "id": "openai/gpt-oss-120b:free",
2274      "name": "OpenAI: gpt-oss-120b (free)",
2275      "cost_per_1m_in": 0,
2276      "cost_per_1m_out": 0,
2277      "cost_per_1m_in_cached": 0,
2278      "cost_per_1m_out_cached": 0,
2279      "context_window": 131072,
2280      "default_max_tokens": 65536,
2281      "can_reason": true,
2282      "reasoning_levels": [
2283        "low",
2284        "medium",
2285        "high"
2286      ],
2287      "default_reasoning_effort": "medium",
2288      "supports_attachments": false
2289    },
2290    {
2291      "id": "openai/gpt-oss-20b",
2292      "name": "OpenAI: gpt-oss-20b",
2293      "cost_per_1m_in": 0.03,
2294      "cost_per_1m_out": 0.14,
2295      "cost_per_1m_in_cached": 0,
2296      "cost_per_1m_out_cached": 0,
2297      "context_window": 131072,
2298      "default_max_tokens": 65536,
2299      "can_reason": true,
2300      "reasoning_levels": [
2301        "low",
2302        "medium",
2303        "high"
2304      ],
2305      "default_reasoning_effort": "medium",
2306      "supports_attachments": false
2307    },
2308    {
2309      "id": "openai/gpt-oss-20b:free",
2310      "name": "OpenAI: gpt-oss-20b (free)",
2311      "cost_per_1m_in": 0,
2312      "cost_per_1m_out": 0,
2313      "cost_per_1m_in_cached": 0,
2314      "cost_per_1m_out_cached": 0,
2315      "context_window": 131072,
2316      "default_max_tokens": 4096,
2317      "can_reason": true,
2318      "reasoning_levels": [
2319        "low",
2320        "medium",
2321        "high"
2322      ],
2323      "default_reasoning_effort": "medium",
2324      "supports_attachments": false
2325    },
2326    {
2327      "id": "openai/gpt-oss-safeguard-20b",
2328      "name": "OpenAI: gpt-oss-safeguard-20b",
2329      "cost_per_1m_in": 0.075,
2330      "cost_per_1m_out": 0.3,
2331      "cost_per_1m_in_cached": 0,
2332      "cost_per_1m_out_cached": 0.037,
2333      "context_window": 131072,
2334      "default_max_tokens": 32768,
2335      "can_reason": true,
2336      "reasoning_levels": [
2337        "low",
2338        "medium",
2339        "high"
2340      ],
2341      "default_reasoning_effort": "medium",
2342      "supports_attachments": false
2343    },
2344    {
2345      "id": "openai/o1",
2346      "name": "OpenAI: o1",
2347      "cost_per_1m_in": 15,
2348      "cost_per_1m_out": 60,
2349      "cost_per_1m_in_cached": 0,
2350      "cost_per_1m_out_cached": 7.5,
2351      "context_window": 200000,
2352      "default_max_tokens": 50000,
2353      "can_reason": true,
2354      "reasoning_levels": [
2355        "low",
2356        "medium",
2357        "high"
2358      ],
2359      "default_reasoning_effort": "medium",
2360      "supports_attachments": true
2361    },
2362    {
2363      "id": "openai/o3",
2364      "name": "OpenAI: o3",
2365      "cost_per_1m_in": 2,
2366      "cost_per_1m_out": 8,
2367      "cost_per_1m_in_cached": 0,
2368      "cost_per_1m_out_cached": 0.5,
2369      "context_window": 200000,
2370      "default_max_tokens": 50000,
2371      "can_reason": true,
2372      "reasoning_levels": [
2373        "low",
2374        "medium",
2375        "high"
2376      ],
2377      "default_reasoning_effort": "medium",
2378      "supports_attachments": true
2379    },
2380    {
2381      "id": "openai/o3-deep-research",
2382      "name": "OpenAI: o3 Deep Research",
2383      "cost_per_1m_in": 10,
2384      "cost_per_1m_out": 40,
2385      "cost_per_1m_in_cached": 0,
2386      "cost_per_1m_out_cached": 2.5,
2387      "context_window": 200000,
2388      "default_max_tokens": 50000,
2389      "can_reason": true,
2390      "reasoning_levels": [
2391        "low",
2392        "medium",
2393        "high"
2394      ],
2395      "default_reasoning_effort": "medium",
2396      "supports_attachments": true
2397    },
2398    {
2399      "id": "openai/o3-mini",
2400      "name": "OpenAI: o3 Mini",
2401      "cost_per_1m_in": 1.1,
2402      "cost_per_1m_out": 4.4,
2403      "cost_per_1m_in_cached": 0,
2404      "cost_per_1m_out_cached": 0.55,
2405      "context_window": 200000,
2406      "default_max_tokens": 50000,
2407      "can_reason": true,
2408      "reasoning_levels": [
2409        "low",
2410        "medium",
2411        "high"
2412      ],
2413      "default_reasoning_effort": "medium",
2414      "supports_attachments": false
2415    },
2416    {
2417      "id": "openai/o3-mini-high",
2418      "name": "OpenAI: o3 Mini High",
2419      "cost_per_1m_in": 1.1,
2420      "cost_per_1m_out": 4.4,
2421      "cost_per_1m_in_cached": 0,
2422      "cost_per_1m_out_cached": 0.55,
2423      "context_window": 200000,
2424      "default_max_tokens": 50000,
2425      "can_reason": true,
2426      "reasoning_levels": [
2427        "low",
2428        "medium",
2429        "high"
2430      ],
2431      "default_reasoning_effort": "medium",
2432      "supports_attachments": false
2433    },
2434    {
2435      "id": "openai/o3-pro",
2436      "name": "OpenAI: o3 Pro",
2437      "cost_per_1m_in": 20,
2438      "cost_per_1m_out": 80,
2439      "cost_per_1m_in_cached": 0,
2440      "cost_per_1m_out_cached": 0,
2441      "context_window": 200000,
2442      "default_max_tokens": 50000,
2443      "can_reason": true,
2444      "reasoning_levels": [
2445        "low",
2446        "medium",
2447        "high"
2448      ],
2449      "default_reasoning_effort": "medium",
2450      "supports_attachments": true
2451    },
2452    {
2453      "id": "openai/o4-mini",
2454      "name": "OpenAI: o4 Mini",
2455      "cost_per_1m_in": 1.1,
2456      "cost_per_1m_out": 4.4,
2457      "cost_per_1m_in_cached": 0,
2458      "cost_per_1m_out_cached": 0.275,
2459      "context_window": 200000,
2460      "default_max_tokens": 50000,
2461      "can_reason": true,
2462      "reasoning_levels": [
2463        "low",
2464        "medium",
2465        "high"
2466      ],
2467      "default_reasoning_effort": "medium",
2468      "supports_attachments": true
2469    },
2470    {
2471      "id": "openai/o4-mini-deep-research",
2472      "name": "OpenAI: o4 Mini Deep Research",
2473      "cost_per_1m_in": 2,
2474      "cost_per_1m_out": 8,
2475      "cost_per_1m_in_cached": 0,
2476      "cost_per_1m_out_cached": 0.5,
2477      "context_window": 200000,
2478      "default_max_tokens": 50000,
2479      "can_reason": true,
2480      "reasoning_levels": [
2481        "low",
2482        "medium",
2483        "high"
2484      ],
2485      "default_reasoning_effort": "medium",
2486      "supports_attachments": true
2487    },
2488    {
2489      "id": "openai/o4-mini-high",
2490      "name": "OpenAI: o4 Mini High",
2491      "cost_per_1m_in": 1.1,
2492      "cost_per_1m_out": 4.4,
2493      "cost_per_1m_in_cached": 0,
2494      "cost_per_1m_out_cached": 0.275,
2495      "context_window": 200000,
2496      "default_max_tokens": 50000,
2497      "can_reason": true,
2498      "reasoning_levels": [
2499        "low",
2500        "medium",
2501        "high"
2502      ],
2503      "default_reasoning_effort": "medium",
2504      "supports_attachments": true
2505    },
2506    {
2507      "id": "openrouter/owl-alpha",
2508      "name": "Owl Alpha",
2509      "cost_per_1m_in": 0,
2510      "cost_per_1m_out": 0,
2511      "cost_per_1m_in_cached": 0,
2512      "cost_per_1m_out_cached": 0,
2513      "context_window": 1048576,
2514      "default_max_tokens": 131072,
2515      "can_reason": false,
2516      "supports_attachments": false
2517    },
2518    {
2519      "id": "poolside/laguna-m.1:free",
2520      "name": "Poolside: Laguna M.1 (free)",
2521      "cost_per_1m_in": 0,
2522      "cost_per_1m_out": 0,
2523      "cost_per_1m_in_cached": 0,
2524      "cost_per_1m_out_cached": 0,
2525      "context_window": 131072,
2526      "default_max_tokens": 4096,
2527      "can_reason": true,
2528      "reasoning_levels": [
2529        "low",
2530        "medium",
2531        "high"
2532      ],
2533      "default_reasoning_effort": "medium",
2534      "supports_attachments": false
2535    },
2536    {
2537      "id": "poolside/laguna-xs.2:free",
2538      "name": "Poolside: Laguna XS.2 (free)",
2539      "cost_per_1m_in": 0,
2540      "cost_per_1m_out": 0,
2541      "cost_per_1m_in_cached": 0,
2542      "cost_per_1m_out_cached": 0,
2543      "context_window": 131072,
2544      "default_max_tokens": 4096,
2545      "can_reason": true,
2546      "reasoning_levels": [
2547        "low",
2548        "medium",
2549        "high"
2550      ],
2551      "default_reasoning_effort": "medium",
2552      "supports_attachments": false
2553    },
2554    {
2555      "id": "prime-intellect/intellect-3",
2556      "name": "Prime Intellect: INTELLECT-3",
2557      "cost_per_1m_in": 0.2,
2558      "cost_per_1m_out": 1.1,
2559      "cost_per_1m_in_cached": 0,
2560      "cost_per_1m_out_cached": 0,
2561      "context_window": 131072,
2562      "default_max_tokens": 65536,
2563      "can_reason": true,
2564      "reasoning_levels": [
2565        "low",
2566        "medium",
2567        "high"
2568      ],
2569      "default_reasoning_effort": "medium",
2570      "supports_attachments": false
2571    },
2572    {
2573      "id": "qwen/qwen-2.5-72b-instruct",
2574      "name": "Qwen2.5 72B Instruct",
2575      "cost_per_1m_in": 0.36,
2576      "cost_per_1m_out": 0.4,
2577      "cost_per_1m_in_cached": 0,
2578      "cost_per_1m_out_cached": 0,
2579      "context_window": 32768,
2580      "default_max_tokens": 8192,
2581      "can_reason": false,
2582      "supports_attachments": false
2583    },
2584    {
2585      "id": "qwen/qwen-plus-2025-07-28",
2586      "name": "Qwen: Qwen Plus 0728",
2587      "cost_per_1m_in": 0.26,
2588      "cost_per_1m_out": 0.78,
2589      "cost_per_1m_in_cached": 0.325,
2590      "cost_per_1m_out_cached": 0,
2591      "context_window": 1000000,
2592      "default_max_tokens": 16384,
2593      "can_reason": false,
2594      "supports_attachments": false
2595    },
2596    {
2597      "id": "qwen/qwen-plus-2025-07-28:thinking",
2598      "name": "Qwen: Qwen Plus 0728 (thinking)",
2599      "cost_per_1m_in": 0.26,
2600      "cost_per_1m_out": 0.78,
2601      "cost_per_1m_in_cached": 0.325,
2602      "cost_per_1m_out_cached": 0,
2603      "context_window": 1000000,
2604      "default_max_tokens": 16384,
2605      "can_reason": true,
2606      "reasoning_levels": [
2607        "low",
2608        "medium",
2609        "high"
2610      ],
2611      "default_reasoning_effort": "medium",
2612      "supports_attachments": false
2613    },
2614    {
2615      "id": "qwen/qwen-vl-max",
2616      "name": "Qwen: Qwen VL Max",
2617      "cost_per_1m_in": 0.52,
2618      "cost_per_1m_out": 2.08,
2619      "cost_per_1m_in_cached": 0,
2620      "cost_per_1m_out_cached": 0,
2621      "context_window": 131072,
2622      "default_max_tokens": 16384,
2623      "can_reason": false,
2624      "supports_attachments": true
2625    },
2626    {
2627      "id": "qwen/qwen-max",
2628      "name": "Qwen: Qwen-Max",
2629      "cost_per_1m_in": 1.04,
2630      "cost_per_1m_out": 4.16,
2631      "cost_per_1m_in_cached": 0,
2632      "cost_per_1m_out_cached": 0.208,
2633      "context_window": 32768,
2634      "default_max_tokens": 4096,
2635      "can_reason": false,
2636      "supports_attachments": false
2637    },
2638    {
2639      "id": "qwen/qwen-plus",
2640      "name": "Qwen: Qwen-Plus",
2641      "cost_per_1m_in": 0.26,
2642      "cost_per_1m_out": 0.78,
2643      "cost_per_1m_in_cached": 0.325,
2644      "cost_per_1m_out_cached": 0.052,
2645      "context_window": 1000000,
2646      "default_max_tokens": 16384,
2647      "can_reason": false,
2648      "supports_attachments": false
2649    },
2650    {
2651      "id": "qwen/qwen-turbo",
2652      "name": "Qwen: Qwen-Turbo",
2653      "cost_per_1m_in": 0.0325,
2654      "cost_per_1m_out": 0.13,
2655      "cost_per_1m_in_cached": 0,
2656      "cost_per_1m_out_cached": 0.0065,
2657      "context_window": 131072,
2658      "default_max_tokens": 4096,
2659      "can_reason": false,
2660      "supports_attachments": false
2661    },
2662    {
2663      "id": "qwen/qwen-2.5-7b-instruct",
2664      "name": "Qwen: Qwen2.5 7B Instruct",
2665      "cost_per_1m_in": 0.04,
2666      "cost_per_1m_out": 0.1,
2667      "cost_per_1m_in_cached": 0,
2668      "cost_per_1m_out_cached": 0.04,
2669      "context_window": 32768,
2670      "default_max_tokens": 4096,
2671      "can_reason": false,
2672      "supports_attachments": false
2673    },
2674    {
2675      "id": "qwen/qwen3-235b-a22b",
2676      "name": "Qwen: Qwen3 235B A22B",
2677      "cost_per_1m_in": 0.455,
2678      "cost_per_1m_out": 1.82,
2679      "cost_per_1m_in_cached": 0,
2680      "cost_per_1m_out_cached": 0,
2681      "context_window": 131072,
2682      "default_max_tokens": 4096,
2683      "can_reason": true,
2684      "reasoning_levels": [
2685        "low",
2686        "medium",
2687        "high"
2688      ],
2689      "default_reasoning_effort": "medium",
2690      "supports_attachments": false
2691    },
2692    {
2693      "id": "qwen/qwen3-235b-a22b-2507",
2694      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2695      "cost_per_1m_in": 0.22,
2696      "cost_per_1m_out": 0.88,
2697      "cost_per_1m_in_cached": 0,
2698      "cost_per_1m_out_cached": 0,
2699      "context_window": 262144,
2700      "default_max_tokens": 8192,
2701      "can_reason": false,
2702      "supports_attachments": false
2703    },
2704    {
2705      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2706      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2707      "cost_per_1m_in": 0.1495,
2708      "cost_per_1m_out": 1.495,
2709      "cost_per_1m_in_cached": 0,
2710      "cost_per_1m_out_cached": 0,
2711      "context_window": 131072,
2712      "default_max_tokens": 13107,
2713      "can_reason": true,
2714      "reasoning_levels": [
2715        "low",
2716        "medium",
2717        "high"
2718      ],
2719      "default_reasoning_effort": "medium",
2720      "supports_attachments": false
2721    },
2722    {
2723      "id": "qwen/qwen3-30b-a3b",
2724      "name": "Qwen: Qwen3 30B A3B",
2725      "cost_per_1m_in": 0.13,
2726      "cost_per_1m_out": 0.52,
2727      "cost_per_1m_in_cached": 0,
2728      "cost_per_1m_out_cached": 0,
2729      "context_window": 131072,
2730      "default_max_tokens": 4096,
2731      "can_reason": true,
2732      "reasoning_levels": [
2733        "low",
2734        "medium",
2735        "high"
2736      ],
2737      "default_reasoning_effort": "medium",
2738      "supports_attachments": false
2739    },
2740    {
2741      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2742      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2743      "cost_per_1m_in": 0.1,
2744      "cost_per_1m_out": 0.3,
2745      "cost_per_1m_in_cached": 0,
2746      "cost_per_1m_out_cached": 0,
2747      "context_window": 262144,
2748      "default_max_tokens": 26214,
2749      "can_reason": false,
2750      "supports_attachments": false
2751    },
2752    {
2753      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2754      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2755      "cost_per_1m_in": 0.08,
2756      "cost_per_1m_out": 0.4,
2757      "cost_per_1m_in_cached": 0,
2758      "cost_per_1m_out_cached": 0.08,
2759      "context_window": 131072,
2760      "default_max_tokens": 65536,
2761      "can_reason": true,
2762      "reasoning_levels": [
2763        "low",
2764        "medium",
2765        "high"
2766      ],
2767      "default_reasoning_effort": "medium",
2768      "supports_attachments": false
2769    },
2770    {
2771      "id": "qwen/qwen3-32b",
2772      "name": "Qwen: Qwen3 32B",
2773      "cost_per_1m_in": 0.104,
2774      "cost_per_1m_out": 0.416,
2775      "cost_per_1m_in_cached": 0,
2776      "cost_per_1m_out_cached": 0,
2777      "context_window": 131072,
2778      "default_max_tokens": 4096,
2779      "can_reason": true,
2780      "reasoning_levels": [
2781        "low",
2782        "medium",
2783        "high"
2784      ],
2785      "default_reasoning_effort": "medium",
2786      "supports_attachments": false
2787    },
2788    {
2789      "id": "qwen/qwen3-8b",
2790      "name": "Qwen: Qwen3 8B",
2791      "cost_per_1m_in": 0.117,
2792      "cost_per_1m_out": 0.455,
2793      "cost_per_1m_in_cached": 0,
2794      "cost_per_1m_out_cached": 0,
2795      "context_window": 131072,
2796      "default_max_tokens": 4096,
2797      "can_reason": true,
2798      "reasoning_levels": [
2799        "low",
2800        "medium",
2801        "high"
2802      ],
2803      "default_reasoning_effort": "medium",
2804      "supports_attachments": false
2805    },
2806    {
2807      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2808      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2809      "cost_per_1m_in": 0.07,
2810      "cost_per_1m_out": 0.28,
2811      "cost_per_1m_in_cached": 0,
2812      "cost_per_1m_out_cached": 0,
2813      "context_window": 262144,
2814      "default_max_tokens": 131072,
2815      "can_reason": false,
2816      "supports_attachments": false
2817    },
2818    {
2819      "id": "qwen/qwen3-coder",
2820      "name": "Qwen: Qwen3 Coder 480B A35B",
2821      "cost_per_1m_in": 0.22,
2822      "cost_per_1m_out": 1.8,
2823      "cost_per_1m_in_cached": 0,
2824      "cost_per_1m_out_cached": 0,
2825      "context_window": 262144,
2826      "default_max_tokens": 32768,
2827      "can_reason": false,
2828      "supports_attachments": false
2829    },
2830    {
2831      "id": "qwen/qwen3-coder:free",
2832      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2833      "cost_per_1m_in": 0,
2834      "cost_per_1m_out": 0,
2835      "cost_per_1m_in_cached": 0,
2836      "cost_per_1m_out_cached": 0,
2837      "context_window": 262000,
2838      "default_max_tokens": 131000,
2839      "can_reason": false,
2840      "supports_attachments": false
2841    },
2842    {
2843      "id": "qwen/qwen3-coder-flash",
2844      "name": "Qwen: Qwen3 Coder Flash",
2845      "cost_per_1m_in": 0.195,
2846      "cost_per_1m_out": 0.975,
2847      "cost_per_1m_in_cached": 0.24375,
2848      "cost_per_1m_out_cached": 0.039,
2849      "context_window": 1000000,
2850      "default_max_tokens": 32768,
2851      "can_reason": false,
2852      "supports_attachments": false
2853    },
2854    {
2855      "id": "qwen/qwen3-coder-next",
2856      "name": "Qwen: Qwen3 Coder Next",
2857      "cost_per_1m_in": 0.12,
2858      "cost_per_1m_out": 0.8,
2859      "cost_per_1m_in_cached": 0,
2860      "cost_per_1m_out_cached": 0.07,
2861      "context_window": 262144,
2862      "default_max_tokens": 131072,
2863      "can_reason": false,
2864      "supports_attachments": false
2865    },
2866    {
2867      "id": "qwen/qwen3-coder-plus",
2868      "name": "Qwen: Qwen3 Coder Plus",
2869      "cost_per_1m_in": 0.65,
2870      "cost_per_1m_out": 3.25,
2871      "cost_per_1m_in_cached": 0.8125,
2872      "cost_per_1m_out_cached": 0.13,
2873      "context_window": 1000000,
2874      "default_max_tokens": 32768,
2875      "can_reason": false,
2876      "supports_attachments": false
2877    },
2878    {
2879      "id": "qwen/qwen3-max",
2880      "name": "Qwen: Qwen3 Max",
2881      "cost_per_1m_in": 0.78,
2882      "cost_per_1m_out": 3.9,
2883      "cost_per_1m_in_cached": 0.975,
2884      "cost_per_1m_out_cached": 0.156,
2885      "context_window": 262144,
2886      "default_max_tokens": 16384,
2887      "can_reason": false,
2888      "supports_attachments": false
2889    },
2890    {
2891      "id": "qwen/qwen3-max-thinking",
2892      "name": "Qwen: Qwen3 Max Thinking",
2893      "cost_per_1m_in": 0.78,
2894      "cost_per_1m_out": 3.9,
2895      "cost_per_1m_in_cached": 0,
2896      "cost_per_1m_out_cached": 0,
2897      "context_window": 262144,
2898      "default_max_tokens": 16384,
2899      "can_reason": true,
2900      "reasoning_levels": [
2901        "low",
2902        "medium",
2903        "high"
2904      ],
2905      "default_reasoning_effort": "medium",
2906      "supports_attachments": false
2907    },
2908    {
2909      "id": "qwen/qwen3-next-80b-a3b-instruct",
2910      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2911      "cost_per_1m_in": 0.09,
2912      "cost_per_1m_out": 1.1,
2913      "cost_per_1m_in_cached": 0,
2914      "cost_per_1m_out_cached": 0,
2915      "context_window": 262144,
2916      "default_max_tokens": 8192,
2917      "can_reason": false,
2918      "supports_attachments": false
2919    },
2920    {
2921      "id": "qwen/qwen3-next-80b-a3b-instruct:free",
2922      "name": "Qwen: Qwen3 Next 80B A3B Instruct (free)",
2923      "cost_per_1m_in": 0,
2924      "cost_per_1m_out": 0,
2925      "cost_per_1m_in_cached": 0,
2926      "cost_per_1m_out_cached": 0,
2927      "context_window": 262144,
2928      "default_max_tokens": 26214,
2929      "can_reason": false,
2930      "supports_attachments": false
2931    },
2932    {
2933      "id": "qwen/qwen3-next-80b-a3b-thinking",
2934      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2935      "cost_per_1m_in": 0.0975,
2936      "cost_per_1m_out": 0.78,
2937      "cost_per_1m_in_cached": 0,
2938      "cost_per_1m_out_cached": 0,
2939      "context_window": 131072,
2940      "default_max_tokens": 16384,
2941      "can_reason": true,
2942      "reasoning_levels": [
2943        "low",
2944        "medium",
2945        "high"
2946      ],
2947      "default_reasoning_effort": "medium",
2948      "supports_attachments": false
2949    },
2950    {
2951      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2952      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2953      "cost_per_1m_in": 0.26,
2954      "cost_per_1m_out": 1.04,
2955      "cost_per_1m_in_cached": 0,
2956      "cost_per_1m_out_cached": 0,
2957      "context_window": 131072,
2958      "default_max_tokens": 16384,
2959      "can_reason": false,
2960      "supports_attachments": true
2961    },
2962    {
2963      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2964      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2965      "cost_per_1m_in": 0.26,
2966      "cost_per_1m_out": 2.6,
2967      "cost_per_1m_in_cached": 0,
2968      "cost_per_1m_out_cached": 0,
2969      "context_window": 131072,
2970      "default_max_tokens": 16384,
2971      "can_reason": true,
2972      "reasoning_levels": [
2973        "low",
2974        "medium",
2975        "high"
2976      ],
2977      "default_reasoning_effort": "medium",
2978      "supports_attachments": true
2979    },
2980    {
2981      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2982      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2983      "cost_per_1m_in": 0.15,
2984      "cost_per_1m_out": 0.6,
2985      "cost_per_1m_in_cached": 0,
2986      "cost_per_1m_out_cached": 0,
2987      "context_window": 262144,
2988      "default_max_tokens": 8192,
2989      "can_reason": false,
2990      "supports_attachments": true
2991    },
2992    {
2993      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2994      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2995      "cost_per_1m_in": 0.13,
2996      "cost_per_1m_out": 1.56,
2997      "cost_per_1m_in_cached": 0,
2998      "cost_per_1m_out_cached": 0,
2999      "context_window": 131072,
3000      "default_max_tokens": 16384,
3001      "can_reason": true,
3002      "reasoning_levels": [
3003        "low",
3004        "medium",
3005        "high"
3006      ],
3007      "default_reasoning_effort": "medium",
3008      "supports_attachments": true
3009    },
3010    {
3011      "id": "qwen/qwen3-vl-32b-instruct",
3012      "name": "Qwen: Qwen3 VL 32B Instruct",
3013      "cost_per_1m_in": 0.104,
3014      "cost_per_1m_out": 0.416,
3015      "cost_per_1m_in_cached": 0,
3016      "cost_per_1m_out_cached": 0,
3017      "context_window": 131072,
3018      "default_max_tokens": 16384,
3019      "can_reason": false,
3020      "supports_attachments": true
3021    },
3022    {
3023      "id": "qwen/qwen3-vl-8b-instruct",
3024      "name": "Qwen: Qwen3 VL 8B Instruct",
3025      "cost_per_1m_in": 0.25,
3026      "cost_per_1m_out": 0.75,
3027      "cost_per_1m_in_cached": 0,
3028      "cost_per_1m_out_cached": 0.12,
3029      "context_window": 262144,
3030      "default_max_tokens": 131072,
3031      "can_reason": false,
3032      "supports_attachments": true
3033    },
3034    {
3035      "id": "qwen/qwen3-vl-8b-thinking",
3036      "name": "Qwen: Qwen3 VL 8B Thinking",
3037      "cost_per_1m_in": 0.117,
3038      "cost_per_1m_out": 1.365,
3039      "cost_per_1m_in_cached": 0,
3040      "cost_per_1m_out_cached": 0,
3041      "context_window": 131072,
3042      "default_max_tokens": 16384,
3043      "can_reason": true,
3044      "reasoning_levels": [
3045        "low",
3046        "medium",
3047        "high"
3048      ],
3049      "default_reasoning_effort": "medium",
3050      "supports_attachments": true
3051    },
3052    {
3053      "id": "qwen/qwen3.5-397b-a17b",
3054      "name": "Qwen: Qwen3.5 397B A17B",
3055      "cost_per_1m_in": 0.6,
3056      "cost_per_1m_out": 3.6,
3057      "cost_per_1m_in_cached": 0,
3058      "cost_per_1m_out_cached": 0,
3059      "context_window": 262144,
3060      "default_max_tokens": 32768,
3061      "can_reason": true,
3062      "reasoning_levels": [
3063        "low",
3064        "medium",
3065        "high"
3066      ],
3067      "default_reasoning_effort": "medium",
3068      "supports_attachments": true
3069    },
3070    {
3071      "id": "qwen/qwen3.5-plus-02-15",
3072      "name": "Qwen: Qwen3.5 Plus 2026-02-15",
3073      "cost_per_1m_in": 0.26,
3074      "cost_per_1m_out": 1.56,
3075      "cost_per_1m_in_cached": 0.325,
3076      "cost_per_1m_out_cached": 0,
3077      "context_window": 1000000,
3078      "default_max_tokens": 32768,
3079      "can_reason": true,
3080      "reasoning_levels": [
3081        "low",
3082        "medium",
3083        "high"
3084      ],
3085      "default_reasoning_effort": "medium",
3086      "supports_attachments": true
3087    },
3088    {
3089      "id": "qwen/qwen3.5-plus-20260420",
3090      "name": "Qwen: Qwen3.5 Plus 2026-04-20",
3091      "cost_per_1m_in": 0.4,
3092      "cost_per_1m_out": 2.4,
3093      "cost_per_1m_in_cached": 0,
3094      "cost_per_1m_out_cached": 0,
3095      "context_window": 1000000,
3096      "default_max_tokens": 32768,
3097      "can_reason": true,
3098      "reasoning_levels": [
3099        "low",
3100        "medium",
3101        "high"
3102      ],
3103      "default_reasoning_effort": "medium",
3104      "supports_attachments": true
3105    },
3106    {
3107      "id": "qwen/qwen3.5-122b-a10b",
3108      "name": "Qwen: Qwen3.5-122B-A10B",
3109      "cost_per_1m_in": 0.26,
3110      "cost_per_1m_out": 2.08,
3111      "cost_per_1m_in_cached": 0,
3112      "cost_per_1m_out_cached": 0,
3113      "context_window": 262144,
3114      "default_max_tokens": 32768,
3115      "can_reason": true,
3116      "reasoning_levels": [
3117        "low",
3118        "medium",
3119        "high"
3120      ],
3121      "default_reasoning_effort": "medium",
3122      "supports_attachments": true
3123    },
3124    {
3125      "id": "qwen/qwen3.5-27b",
3126      "name": "Qwen: Qwen3.5-27B",
3127      "cost_per_1m_in": 0.3,
3128      "cost_per_1m_out": 2.4,
3129      "cost_per_1m_in_cached": 0,
3130      "cost_per_1m_out_cached": 0,
3131      "context_window": 262144,
3132      "default_max_tokens": 32768,
3133      "can_reason": true,
3134      "reasoning_levels": [
3135        "low",
3136        "medium",
3137        "high"
3138      ],
3139      "default_reasoning_effort": "medium",
3140      "supports_attachments": true
3141    },
3142    {
3143      "id": "qwen/qwen3.5-35b-a3b",
3144      "name": "Qwen: Qwen3.5-35B-A3B",
3145      "cost_per_1m_in": 0.2,
3146      "cost_per_1m_out": 1,
3147      "cost_per_1m_in_cached": 0,
3148      "cost_per_1m_out_cached": 0.05,
3149      "context_window": 262144,
3150      "default_max_tokens": 131072,
3151      "can_reason": true,
3152      "reasoning_levels": [
3153        "low",
3154        "medium",
3155        "high"
3156      ],
3157      "default_reasoning_effort": "medium",
3158      "supports_attachments": true
3159    },
3160    {
3161      "id": "qwen/qwen3.5-9b",
3162      "name": "Qwen: Qwen3.5-9B",
3163      "cost_per_1m_in": 0.1,
3164      "cost_per_1m_out": 0.15,
3165      "cost_per_1m_in_cached": 0,
3166      "cost_per_1m_out_cached": 0,
3167      "context_window": 262144,
3168      "default_max_tokens": 26214,
3169      "can_reason": true,
3170      "reasoning_levels": [
3171        "low",
3172        "medium",
3173        "high"
3174      ],
3175      "default_reasoning_effort": "medium",
3176      "supports_attachments": true
3177    },
3178    {
3179      "id": "qwen/qwen3.5-flash-02-23",
3180      "name": "Qwen: Qwen3.5-Flash",
3181      "cost_per_1m_in": 0.065,
3182      "cost_per_1m_out": 0.26,
3183      "cost_per_1m_in_cached": 0.08125,
3184      "cost_per_1m_out_cached": 0,
3185      "context_window": 1000000,
3186      "default_max_tokens": 32768,
3187      "can_reason": true,
3188      "reasoning_levels": [
3189        "low",
3190        "medium",
3191        "high"
3192      ],
3193      "default_reasoning_effort": "medium",
3194      "supports_attachments": true
3195    },
3196    {
3197      "id": "qwen/qwen3.6-27b",
3198      "name": "Qwen: Qwen3.6 27B",
3199      "cost_per_1m_in": 0.32,
3200      "cost_per_1m_out": 3.2,
3201      "cost_per_1m_in_cached": 0,
3202      "cost_per_1m_out_cached": 0,
3203      "context_window": 262144,
3204      "default_max_tokens": 40960,
3205      "can_reason": true,
3206      "reasoning_levels": [
3207        "low",
3208        "medium",
3209        "high"
3210      ],
3211      "default_reasoning_effort": "medium",
3212      "supports_attachments": true
3213    },
3214    {
3215      "id": "qwen/qwen3.6-flash",
3216      "name": "Qwen: Qwen3.6 Flash",
3217      "cost_per_1m_in": 0.25,
3218      "cost_per_1m_out": 1.5,
3219      "cost_per_1m_in_cached": 0.3125,
3220      "cost_per_1m_out_cached": 0,
3221      "context_window": 1000000,
3222      "default_max_tokens": 32768,
3223      "can_reason": true,
3224      "reasoning_levels": [
3225        "low",
3226        "medium",
3227        "high"
3228      ],
3229      "default_reasoning_effort": "medium",
3230      "supports_attachments": true
3231    },
3232    {
3233      "id": "qwen/qwen3.6-max-preview",
3234      "name": "Qwen: Qwen3.6 Max Preview",
3235      "cost_per_1m_in": 1.04,
3236      "cost_per_1m_out": 6.24,
3237      "cost_per_1m_in_cached": 1.3,
3238      "cost_per_1m_out_cached": 0,
3239      "context_window": 262144,
3240      "default_max_tokens": 32768,
3241      "can_reason": true,
3242      "reasoning_levels": [
3243        "low",
3244        "medium",
3245        "high"
3246      ],
3247      "default_reasoning_effort": "medium",
3248      "supports_attachments": false
3249    },
3250    {
3251      "id": "qwen/qwen3.6-plus",
3252      "name": "Qwen: Qwen3.6 Plus",
3253      "cost_per_1m_in": 0.325,
3254      "cost_per_1m_out": 1.95,
3255      "cost_per_1m_in_cached": 0.40625,
3256      "cost_per_1m_out_cached": 0,
3257      "context_window": 1000000,
3258      "default_max_tokens": 32768,
3259      "can_reason": true,
3260      "reasoning_levels": [
3261        "low",
3262        "medium",
3263        "high"
3264      ],
3265      "default_reasoning_effort": "medium",
3266      "supports_attachments": true
3267    },
3268    {
3269      "id": "relace/relace-search",
3270      "name": "Relace: Relace Search",
3271      "cost_per_1m_in": 1,
3272      "cost_per_1m_out": 3,
3273      "cost_per_1m_in_cached": 0,
3274      "cost_per_1m_out_cached": 0,
3275      "context_window": 256000,
3276      "default_max_tokens": 64000,
3277      "can_reason": false,
3278      "supports_attachments": false
3279    },
3280    {
3281      "id": "stepfun/step-3.5-flash",
3282      "name": "StepFun: Step 3.5 Flash",
3283      "cost_per_1m_in": 0.1,
3284      "cost_per_1m_out": 0.3,
3285      "cost_per_1m_in_cached": 0,
3286      "cost_per_1m_out_cached": 0.02,
3287      "context_window": 262144,
3288      "default_max_tokens": 8192,
3289      "can_reason": true,
3290      "reasoning_levels": [
3291        "low",
3292        "medium",
3293        "high"
3294      ],
3295      "default_reasoning_effort": "medium",
3296      "supports_attachments": false
3297    },
3298    {
3299      "id": "tngtech/deepseek-r1t2-chimera",
3300      "name": "TNG: DeepSeek R1T2 Chimera",
3301      "cost_per_1m_in": 0.3,
3302      "cost_per_1m_out": 1.1,
3303      "cost_per_1m_in_cached": 0,
3304      "cost_per_1m_out_cached": 0.15,
3305      "context_window": 163840,
3306      "default_max_tokens": 81920,
3307      "can_reason": true,
3308      "reasoning_levels": [
3309        "low",
3310        "medium",
3311        "high"
3312      ],
3313      "default_reasoning_effort": "medium",
3314      "supports_attachments": false
3315    },
3316    {
3317      "id": "tencent/hy3-preview:free",
3318      "name": "Tencent: Hy3 preview (free)",
3319      "cost_per_1m_in": 0,
3320      "cost_per_1m_out": 0,
3321      "cost_per_1m_in_cached": 0,
3322      "cost_per_1m_out_cached": 0,
3323      "context_window": 262144,
3324      "default_max_tokens": 131072,
3325      "can_reason": true,
3326      "reasoning_levels": [
3327        "low",
3328        "medium",
3329        "high"
3330      ],
3331      "default_reasoning_effort": "medium",
3332      "supports_attachments": false
3333    },
3334    {
3335      "id": "thedrummer/rocinante-12b",
3336      "name": "TheDrummer: Rocinante 12B",
3337      "cost_per_1m_in": 0.17,
3338      "cost_per_1m_out": 0.43,
3339      "cost_per_1m_in_cached": 0,
3340      "cost_per_1m_out_cached": 0,
3341      "context_window": 32768,
3342      "default_max_tokens": 16384,
3343      "can_reason": false,
3344      "supports_attachments": false
3345    },
3346    {
3347      "id": "thedrummer/unslopnemo-12b",
3348      "name": "TheDrummer: UnslopNemo 12B",
3349      "cost_per_1m_in": 0.4,
3350      "cost_per_1m_out": 0.4,
3351      "cost_per_1m_in_cached": 0,
3352      "cost_per_1m_out_cached": 0,
3353      "context_window": 32768,
3354      "default_max_tokens": 16384,
3355      "can_reason": false,
3356      "supports_attachments": false
3357    },
3358    {
3359      "id": "alibaba/tongyi-deepresearch-30b-a3b",
3360      "name": "Tongyi DeepResearch 30B A3B",
3361      "cost_per_1m_in": 0.09,
3362      "cost_per_1m_out": 0.45,
3363      "cost_per_1m_in_cached": 0,
3364      "cost_per_1m_out_cached": 0.09,
3365      "context_window": 131072,
3366      "default_max_tokens": 65536,
3367      "can_reason": true,
3368      "reasoning_levels": [
3369        "low",
3370        "medium",
3371        "high"
3372      ],
3373      "default_reasoning_effort": "medium",
3374      "supports_attachments": false
3375    },
3376    {
3377      "id": "upstage/solar-pro-3",
3378      "name": "Upstage: Solar Pro 3",
3379      "cost_per_1m_in": 0.15,
3380      "cost_per_1m_out": 0.6,
3381      "cost_per_1m_in_cached": 0,
3382      "cost_per_1m_out_cached": 0.015,
3383      "context_window": 128000,
3384      "default_max_tokens": 12800,
3385      "can_reason": true,
3386      "reasoning_levels": [
3387        "low",
3388        "medium",
3389        "high"
3390      ],
3391      "default_reasoning_effort": "medium",
3392      "supports_attachments": false
3393    },
3394    {
3395      "id": "xiaomi/mimo-v2-flash",
3396      "name": "Xiaomi: MiMo-V2-Flash",
3397      "cost_per_1m_in": 0.1,
3398      "cost_per_1m_out": 0.3,
3399      "cost_per_1m_in_cached": 0,
3400      "cost_per_1m_out_cached": 0.02,
3401      "context_window": 262144,
3402      "default_max_tokens": 16000,
3403      "can_reason": true,
3404      "reasoning_levels": [
3405        "low",
3406        "medium",
3407        "high"
3408      ],
3409      "default_reasoning_effort": "medium",
3410      "supports_attachments": false
3411    },
3412    {
3413      "id": "xiaomi/mimo-v2-omni",
3414      "name": "Xiaomi: MiMo-V2-Omni",
3415      "cost_per_1m_in": 0.4,
3416      "cost_per_1m_out": 2,
3417      "cost_per_1m_in_cached": 0,
3418      "cost_per_1m_out_cached": 0.08,
3419      "context_window": 262144,
3420      "default_max_tokens": 32768,
3421      "can_reason": true,
3422      "reasoning_levels": [
3423        "low",
3424        "medium",
3425        "high"
3426      ],
3427      "default_reasoning_effort": "medium",
3428      "supports_attachments": true
3429    },
3430    {
3431      "id": "xiaomi/mimo-v2-pro",
3432      "name": "Xiaomi: MiMo-V2-Pro",
3433      "cost_per_1m_in": 1,
3434      "cost_per_1m_out": 3,
3435      "cost_per_1m_in_cached": 0,
3436      "cost_per_1m_out_cached": 0.2,
3437      "context_window": 1048576,
3438      "default_max_tokens": 65536,
3439      "can_reason": true,
3440      "reasoning_levels": [
3441        "low",
3442        "medium",
3443        "high"
3444      ],
3445      "default_reasoning_effort": "medium",
3446      "supports_attachments": false
3447    },
3448    {
3449      "id": "xiaomi/mimo-v2.5",
3450      "name": "Xiaomi: MiMo-V2.5",
3451      "cost_per_1m_in": 0.4,
3452      "cost_per_1m_out": 2,
3453      "cost_per_1m_in_cached": 0,
3454      "cost_per_1m_out_cached": 0.08,
3455      "context_window": 1048576,
3456      "default_max_tokens": 65536,
3457      "can_reason": true,
3458      "reasoning_levels": [
3459        "low",
3460        "medium",
3461        "high"
3462      ],
3463      "default_reasoning_effort": "medium",
3464      "supports_attachments": true
3465    },
3466    {
3467      "id": "xiaomi/mimo-v2.5-pro",
3468      "name": "Xiaomi: MiMo-V2.5-Pro",
3469      "cost_per_1m_in": 1,
3470      "cost_per_1m_out": 3,
3471      "cost_per_1m_in_cached": 0,
3472      "cost_per_1m_out_cached": 0.2,
3473      "context_window": 1048576,
3474      "default_max_tokens": 65536,
3475      "can_reason": true,
3476      "reasoning_levels": [
3477        "low",
3478        "medium",
3479        "high"
3480      ],
3481      "default_reasoning_effort": "medium",
3482      "supports_attachments": false
3483    },
3484    {
3485      "id": "z-ai/glm-4-32b",
      "name": "Z.ai: GLM 4 32B",
3487      "cost_per_1m_in": 0.1,
3488      "cost_per_1m_out": 0.1,
3489      "cost_per_1m_in_cached": 0,
3490      "cost_per_1m_out_cached": 0,
3491      "context_window": 128000,
3492      "default_max_tokens": 12800,
3493      "can_reason": false,
3494      "supports_attachments": false
3495    },
3496    {
3497      "id": "z-ai/glm-4.5",
3498      "name": "Z.ai: GLM 4.5",
3499      "cost_per_1m_in": 0.6,
3500      "cost_per_1m_out": 2.2,
3501      "cost_per_1m_in_cached": 0,
3502      "cost_per_1m_out_cached": 0.11,
3503      "context_window": 131072,
3504      "default_max_tokens": 49152,
3505      "can_reason": true,
3506      "reasoning_levels": [
3507        "low",
3508        "medium",
3509        "high"
3510      ],
3511      "default_reasoning_effort": "medium",
3512      "supports_attachments": false
3513    },
3514    {
3515      "id": "z-ai/glm-4.5-air",
3516      "name": "Z.ai: GLM 4.5 Air",
3517      "cost_per_1m_in": 0.14,
3518      "cost_per_1m_out": 0.86,
3519      "cost_per_1m_in_cached": 0,
3520      "cost_per_1m_out_cached": 0,
3521      "context_window": 131072,
3522      "default_max_tokens": 65536,
3523      "can_reason": true,
3524      "reasoning_levels": [
3525        "low",
3526        "medium",
3527        "high"
3528      ],
3529      "default_reasoning_effort": "medium",
3530      "supports_attachments": false
3531    },
3532    {
3533      "id": "z-ai/glm-4.5-air:free",
3534      "name": "Z.ai: GLM 4.5 Air (free)",
3535      "cost_per_1m_in": 0,
3536      "cost_per_1m_out": 0,
3537      "cost_per_1m_in_cached": 0,
3538      "cost_per_1m_out_cached": 0,
3539      "context_window": 131072,
3540      "default_max_tokens": 48000,
3541      "can_reason": true,
3542      "reasoning_levels": [
3543        "low",
3544        "medium",
3545        "high"
3546      ],
3547      "default_reasoning_effort": "medium",
3548      "supports_attachments": false
3549    },
3550    {
3551      "id": "z-ai/glm-4.5v",
3552      "name": "Z.ai: GLM 4.5V",
3553      "cost_per_1m_in": 0.6,
3554      "cost_per_1m_out": 1.8,
3555      "cost_per_1m_in_cached": 0,
3556      "cost_per_1m_out_cached": 0.11,
3557      "context_window": 65536,
3558      "default_max_tokens": 8192,
3559      "can_reason": true,
3560      "reasoning_levels": [
3561        "low",
3562        "medium",
3563        "high"
3564      ],
3565      "default_reasoning_effort": "medium",
3566      "supports_attachments": true
3567    },
3568    {
3569      "id": "z-ai/glm-4.6",
3570      "name": "Z.ai: GLM 4.6",
3571      "cost_per_1m_in": 0.39,
3572      "cost_per_1m_out": 1.9,
3573      "cost_per_1m_in_cached": 0,
3574      "cost_per_1m_out_cached": 0,
3575      "context_window": 204800,
3576      "default_max_tokens": 102400,
3577      "can_reason": true,
3578      "reasoning_levels": [
3579        "low",
3580        "medium",
3581        "high"
3582      ],
3583      "default_reasoning_effort": "medium",
3584      "supports_attachments": false
3585    },
3586    {
3587      "id": "z-ai/glm-4.6v",
3588      "name": "Z.ai: GLM 4.6V",
3589      "cost_per_1m_in": 0.3,
3590      "cost_per_1m_out": 0.9,
3591      "cost_per_1m_in_cached": 0,
3592      "cost_per_1m_out_cached": 0.05,
3593      "context_window": 131072,
3594      "default_max_tokens": 12000,
3595      "can_reason": true,
3596      "reasoning_levels": [
3597        "low",
3598        "medium",
3599        "high"
3600      ],
3601      "default_reasoning_effort": "medium",
3602      "supports_attachments": true
3603    },
3604    {
3605      "id": "z-ai/glm-4.7",
3606      "name": "Z.ai: GLM 4.7",
3607      "cost_per_1m_in": 0.45,
3608      "cost_per_1m_out": 2.2,
3609      "cost_per_1m_in_cached": 0,
3610      "cost_per_1m_out_cached": 0.11,
3611      "context_window": 204800,
3612      "default_max_tokens": 102400,
3613      "can_reason": true,
3614      "reasoning_levels": [
3615        "low",
3616        "medium",
3617        "high"
3618      ],
3619      "default_reasoning_effort": "medium",
3620      "supports_attachments": false
3621    },
3622    {
3623      "id": "z-ai/glm-4.7-flash",
3624      "name": "Z.ai: GLM 4.7 Flash",
3625      "cost_per_1m_in": 0.06,
3626      "cost_per_1m_out": 0.4,
3627      "cost_per_1m_in_cached": 0,
3628      "cost_per_1m_out_cached": 0.01,
3629      "context_window": 202752,
3630      "default_max_tokens": 8192,
3631      "can_reason": true,
3632      "reasoning_levels": [
3633        "low",
3634        "medium",
3635        "high"
3636      ],
3637      "default_reasoning_effort": "medium",
3638      "supports_attachments": false
3639    },
3640    {
3641      "id": "z-ai/glm-5",
3642      "name": "Z.ai: GLM 5",
3643      "cost_per_1m_in": 0.95,
3644      "cost_per_1m_out": 2.55,
3645      "cost_per_1m_in_cached": 0,
3646      "cost_per_1m_out_cached": 0.2,
3647      "context_window": 204800,
3648      "default_max_tokens": 65536,
3649      "can_reason": true,
3650      "reasoning_levels": [
3651        "low",
3652        "medium",
3653        "high"
3654      ],
3655      "default_reasoning_effort": "medium",
3656      "supports_attachments": false
3657    },
3658    {
3659      "id": "z-ai/glm-5-turbo",
3660      "name": "Z.ai: GLM 5 Turbo",
3661      "cost_per_1m_in": 1.2,
3662      "cost_per_1m_out": 4,
3663      "cost_per_1m_in_cached": 0,
3664      "cost_per_1m_out_cached": 0.24,
3665      "context_window": 262144,
3666      "default_max_tokens": 65536,
3667      "can_reason": true,
3668      "reasoning_levels": [
3669        "low",
3670        "medium",
3671        "high"
3672      ],
3673      "default_reasoning_effort": "medium",
3674      "supports_attachments": false
3675    },
3676    {
3677      "id": "z-ai/glm-5.1",
3678      "name": "Z.ai: GLM 5.1",
3679      "cost_per_1m_in": 1.4,
3680      "cost_per_1m_out": 4.4,
3681      "cost_per_1m_in_cached": 0,
3682      "cost_per_1m_out_cached": 0.26,
3683      "context_window": 204800,
3684      "default_max_tokens": 65536,
3685      "can_reason": true,
3686      "reasoning_levels": [
3687        "low",
3688        "medium",
3689        "high"
3690      ],
3691      "default_reasoning_effort": "medium",
3692      "supports_attachments": false
3693    },
3694    {
3695      "id": "z-ai/glm-5v-turbo",
3696      "name": "Z.ai: GLM 5V Turbo",
3697      "cost_per_1m_in": 1.2,
3698      "cost_per_1m_out": 4,
3699      "cost_per_1m_in_cached": 0,
3700      "cost_per_1m_out_cached": 0.24,
3701      "context_window": 202752,
3702      "default_max_tokens": 65536,
3703      "can_reason": true,
3704      "reasoning_levels": [
3705        "low",
3706        "medium",
3707        "high"
3708      ],
3709      "default_reasoning_effort": "medium",
3710      "supports_attachments": true
3711    },
3712    {
3713      "id": "inclusionai/ling-2.6-1t:free",
3714      "name": "inclusionAI: Ling-2.6-1T (free)",
3715      "cost_per_1m_in": 0,
3716      "cost_per_1m_out": 0,
3717      "cost_per_1m_in_cached": 0,
3718      "cost_per_1m_out_cached": 0,
3719      "context_window": 262144,
3720      "default_max_tokens": 16384,
3721      "can_reason": false,
3722      "supports_attachments": false
3723    },
3724    {
3725      "id": "inclusionai/ling-2.6-flash",
3726      "name": "inclusionAI: Ling-2.6-flash",
3727      "cost_per_1m_in": 0.08,
3728      "cost_per_1m_out": 0.24,
3729      "cost_per_1m_in_cached": 0,
3730      "cost_per_1m_out_cached": 0.016,
3731      "context_window": 262144,
3732      "default_max_tokens": 16384,
3733      "can_reason": false,
3734      "supports_attachments": false
3735    },
3736    {
3737      "id": "x-ai/grok-3",
3738      "name": "xAI: Grok 3",
3739      "cost_per_1m_in": 3,
3740      "cost_per_1m_out": 15,
3741      "cost_per_1m_in_cached": 0,
3742      "cost_per_1m_out_cached": 0.75,
3743      "context_window": 131072,
3744      "default_max_tokens": 13107,
3745      "can_reason": false,
3746      "supports_attachments": false
3747    },
3748    {
3749      "id": "x-ai/grok-3-beta",
3750      "name": "xAI: Grok 3 Beta",
3751      "cost_per_1m_in": 3,
3752      "cost_per_1m_out": 15,
3753      "cost_per_1m_in_cached": 0,
3754      "cost_per_1m_out_cached": 0.75,
3755      "context_window": 131072,
3756      "default_max_tokens": 13107,
3757      "can_reason": false,
3758      "supports_attachments": false
3759    },
3760    {
3761      "id": "x-ai/grok-3-mini",
3762      "name": "xAI: Grok 3 Mini",
3763      "cost_per_1m_in": 0.3,
3764      "cost_per_1m_out": 0.5,
3765      "cost_per_1m_in_cached": 0,
3766      "cost_per_1m_out_cached": 0.075,
3767      "context_window": 131072,
3768      "default_max_tokens": 13107,
3769      "can_reason": true,
3770      "reasoning_levels": [
3771        "low",
3772        "medium",
3773        "high"
3774      ],
3775      "default_reasoning_effort": "medium",
3776      "supports_attachments": false
3777    },
3778    {
3779      "id": "x-ai/grok-3-mini-beta",
3780      "name": "xAI: Grok 3 Mini Beta",
3781      "cost_per_1m_in": 0.3,
3782      "cost_per_1m_out": 0.5,
3783      "cost_per_1m_in_cached": 0,
3784      "cost_per_1m_out_cached": 0.075,
3785      "context_window": 131072,
3786      "default_max_tokens": 13107,
3787      "can_reason": true,
3788      "reasoning_levels": [
3789        "low",
3790        "medium",
3791        "high"
3792      ],
3793      "default_reasoning_effort": "medium",
3794      "supports_attachments": false
3795    },
3796    {
3797      "id": "x-ai/grok-4",
3798      "name": "xAI: Grok 4",
3799      "cost_per_1m_in": 3,
3800      "cost_per_1m_out": 15,
3801      "cost_per_1m_in_cached": 0,
3802      "cost_per_1m_out_cached": 0.75,
3803      "context_window": 256000,
3804      "default_max_tokens": 25600,
3805      "can_reason": true,
3806      "reasoning_levels": [
3807        "low",
3808        "medium",
3809        "high"
3810      ],
3811      "default_reasoning_effort": "medium",
3812      "supports_attachments": true
3813    },
3814    {
3815      "id": "x-ai/grok-4-fast",
3816      "name": "xAI: Grok 4 Fast",
3817      "cost_per_1m_in": 0.2,
3818      "cost_per_1m_out": 0.5,
3819      "cost_per_1m_in_cached": 0,
3820      "cost_per_1m_out_cached": 0.05,
3821      "context_window": 2000000,
3822      "default_max_tokens": 15000,
3823      "can_reason": true,
3824      "reasoning_levels": [
3825        "low",
3826        "medium",
3827        "high"
3828      ],
3829      "default_reasoning_effort": "medium",
3830      "supports_attachments": true
3831    },
3832    {
3833      "id": "x-ai/grok-4.1-fast",
3834      "name": "xAI: Grok 4.1 Fast",
3835      "cost_per_1m_in": 0.2,
3836      "cost_per_1m_out": 0.5,
3837      "cost_per_1m_in_cached": 0,
3838      "cost_per_1m_out_cached": 0.05,
3839      "context_window": 2000000,
3840      "default_max_tokens": 15000,
3841      "can_reason": true,
3842      "reasoning_levels": [
3843        "low",
3844        "medium",
3845        "high"
3846      ],
3847      "default_reasoning_effort": "medium",
3848      "supports_attachments": true
3849    },
3850    {
3851      "id": "x-ai/grok-4.20",
3852      "name": "xAI: Grok 4.20",
3853      "cost_per_1m_in": 1.25,
3854      "cost_per_1m_out": 2.5,
3855      "cost_per_1m_in_cached": 0,
3856      "cost_per_1m_out_cached": 0.2,
3857      "context_window": 2000000,
3858      "default_max_tokens": 200000,
3859      "can_reason": true,
3860      "reasoning_levels": [
3861        "low",
3862        "medium",
3863        "high"
3864      ],
3865      "default_reasoning_effort": "medium",
3866      "supports_attachments": true
3867    },
3868    {
3869      "id": "x-ai/grok-4.3",
3870      "name": "xAI: Grok 4.3",
3871      "cost_per_1m_in": 1.25,
3872      "cost_per_1m_out": 2.5,
3873      "cost_per_1m_in_cached": 0,
3874      "cost_per_1m_out_cached": 0.2,
3875      "context_window": 1000000,
3876      "default_max_tokens": 100000,
3877      "can_reason": true,
3878      "reasoning_levels": [
3879        "low",
3880        "medium",
3881        "high"
3882      ],
3883      "default_reasoning_effort": "medium",
3884      "supports_attachments": true
3885    },
3886    {
3887      "id": "x-ai/grok-code-fast-1",
3888      "name": "xAI: Grok Code Fast 1",
3889      "cost_per_1m_in": 0.2,
3890      "cost_per_1m_out": 1.5,
3891      "cost_per_1m_in_cached": 0,
3892      "cost_per_1m_out_cached": 0.02,
3893      "context_window": 256000,
3894      "default_max_tokens": 5000,
3895      "can_reason": true,
3896      "reasoning_levels": [
3897        "low",
3898        "medium",
3899        "high"
3900      ],
3901      "default_reasoning_effort": "medium",
3902      "supports_attachments": false
3903    }
3904  ],
3905  "default_headers": {
3906    "HTTP-Referer": "https://charm.land",
3907    "X-Title": "Crush"
3908  }
3909}