openrouter.json

   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false
  21    },
  22    {
  23      "id": "allenai/olmo-3.1-32b-instruct",
  24      "name": "AllenAI: Olmo 3.1 32B Instruct",
  25      "cost_per_1m_in": 0.2,
  26      "cost_per_1m_out": 0.6,
  27      "cost_per_1m_in_cached": 0,
  28      "cost_per_1m_out_cached": 0,
  29      "context_window": 65536,
  30      "default_max_tokens": 8192,
  31      "can_reason": false,
  32      "supports_attachments": false
  33    },
  34    {
  35      "id": "amazon/nova-2-lite-v1",
  36      "name": "Amazon: Nova 2 Lite",
  37      "cost_per_1m_in": 0.3,
  38      "cost_per_1m_out": 2.5,
  39      "cost_per_1m_in_cached": 0,
  40      "cost_per_1m_out_cached": 0,
  41      "context_window": 1000000,
  42      "default_max_tokens": 32767,
  43      "can_reason": true,
  44      "reasoning_levels": [
  45        "low",
  46        "medium",
  47        "high"
  48      ],
  49      "default_reasoning_effort": "medium",
  50      "supports_attachments": true
  51    },
  52    {
  53      "id": "amazon/nova-lite-v1",
  54      "name": "Amazon: Nova Lite 1.0",
  55      "cost_per_1m_in": 0.06,
  56      "cost_per_1m_out": 0.24,
  57      "cost_per_1m_in_cached": 0,
  58      "cost_per_1m_out_cached": 0,
  59      "context_window": 300000,
  60      "default_max_tokens": 2560,
  61      "can_reason": false,
  62      "supports_attachments": true
  63    },
  64    {
  65      "id": "amazon/nova-micro-v1",
  66      "name": "Amazon: Nova Micro 1.0",
  67      "cost_per_1m_in": 0.035,
  68      "cost_per_1m_out": 0.14,
  69      "cost_per_1m_in_cached": 0,
  70      "cost_per_1m_out_cached": 0,
  71      "context_window": 128000,
  72      "default_max_tokens": 2560,
  73      "can_reason": false,
  74      "supports_attachments": false
  75    },
  76    {
  77      "id": "amazon/nova-premier-v1",
  78      "name": "Amazon: Nova Premier 1.0",
  79      "cost_per_1m_in": 2.5,
  80      "cost_per_1m_out": 12.5,
  81      "cost_per_1m_in_cached": 0,
  82      "cost_per_1m_out_cached": 0.625,
  83      "context_window": 1000000,
  84      "default_max_tokens": 16000,
  85      "can_reason": false,
  86      "supports_attachments": true
  87    },
  88    {
  89      "id": "amazon/nova-pro-v1",
  90      "name": "Amazon: Nova Pro 1.0",
  91      "cost_per_1m_in": 0.8,
  92      "cost_per_1m_out": 3.2,
  93      "cost_per_1m_in_cached": 0,
  94      "cost_per_1m_out_cached": 0,
  95      "context_window": 300000,
  96      "default_max_tokens": 2560,
  97      "can_reason": false,
  98      "supports_attachments": true
  99    },
 100    {
 101      "id": "anthropic/claude-3-haiku",
 102      "name": "Anthropic: Claude 3 Haiku",
 103      "cost_per_1m_in": 0.25,
 104      "cost_per_1m_out": 1.25,
 105      "cost_per_1m_in_cached": 0.3,
 106      "cost_per_1m_out_cached": 0.03,
 107      "context_window": 200000,
 108      "default_max_tokens": 2048,
 109      "can_reason": false,
 110      "supports_attachments": true
 111    },
 112    {
 113      "id": "anthropic/claude-3.5-haiku",
 114      "name": "Anthropic: Claude 3.5 Haiku",
 115      "cost_per_1m_in": 0.8,
 116      "cost_per_1m_out": 4,
 117      "cost_per_1m_in_cached": 1,
 118      "cost_per_1m_out_cached": 0.08,
 119      "context_window": 200000,
 120      "default_max_tokens": 4096,
 121      "can_reason": false,
 122      "supports_attachments": true
 123    },
 124    {
 125      "id": "anthropic/claude-3.7-sonnet",
 126      "name": "Anthropic: Claude 3.7 Sonnet",
 127      "cost_per_1m_in": 3,
 128      "cost_per_1m_out": 15,
 129      "cost_per_1m_in_cached": 3.75,
 130      "cost_per_1m_out_cached": 0.3,
 131      "context_window": 200000,
 132      "default_max_tokens": 32000,
 133      "can_reason": true,
 134      "reasoning_levels": [
 135        "low",
 136        "medium",
 137        "high"
 138      ],
 139      "default_reasoning_effort": "medium",
 140      "supports_attachments": true
 141    },
 142    {
 143      "id": "anthropic/claude-3.7-sonnet:thinking",
 144      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 145      "cost_per_1m_in": 3,
 146      "cost_per_1m_out": 15,
 147      "cost_per_1m_in_cached": 3.75,
 148      "cost_per_1m_out_cached": 0.3,
 149      "context_window": 200000,
 150      "default_max_tokens": 32000,
 151      "can_reason": true,
 152      "reasoning_levels": [
 153        "low",
 154        "medium",
 155        "high"
 156      ],
 157      "default_reasoning_effort": "medium",
 158      "supports_attachments": true
 159    },
 160    {
 161      "id": "anthropic/claude-haiku-4.5",
 162      "name": "Anthropic: Claude Haiku 4.5",
 163      "cost_per_1m_in": 1,
 164      "cost_per_1m_out": 5,
 165      "cost_per_1m_in_cached": 1.25,
 166      "cost_per_1m_out_cached": 0.1,
 167      "context_window": 200000,
 168      "default_max_tokens": 32000,
 169      "can_reason": true,
 170      "reasoning_levels": [
 171        "low",
 172        "medium",
 173        "high"
 174      ],
 175      "default_reasoning_effort": "medium",
 176      "supports_attachments": true
 177    },
 178    {
 179      "id": "anthropic/claude-opus-4",
 180      "name": "Anthropic: Claude Opus 4",
 181      "cost_per_1m_in": 15,
 182      "cost_per_1m_out": 75,
 183      "cost_per_1m_in_cached": 18.75,
 184      "cost_per_1m_out_cached": 1.5,
 185      "context_window": 200000,
 186      "default_max_tokens": 16000,
 187      "can_reason": true,
 188      "reasoning_levels": [
 189        "low",
 190        "medium",
 191        "high"
 192      ],
 193      "default_reasoning_effort": "medium",
 194      "supports_attachments": true
 195    },
 196    {
 197      "id": "anthropic/claude-opus-4.1",
 198      "name": "Anthropic: Claude Opus 4.1",
 199      "cost_per_1m_in": 15,
 200      "cost_per_1m_out": 75,
 201      "cost_per_1m_in_cached": 18.75,
 202      "cost_per_1m_out_cached": 1.5,
 203      "context_window": 200000,
 204      "default_max_tokens": 16000,
 205      "can_reason": true,
 206      "reasoning_levels": [
 207        "low",
 208        "medium",
 209        "high"
 210      ],
 211      "default_reasoning_effort": "medium",
 212      "supports_attachments": true
 213    },
 214    {
 215      "id": "anthropic/claude-opus-4.5",
 216      "name": "Anthropic: Claude Opus 4.5",
 217      "cost_per_1m_in": 5,
 218      "cost_per_1m_out": 25,
 219      "cost_per_1m_in_cached": 6.25,
 220      "cost_per_1m_out_cached": 0.5,
 221      "context_window": 200000,
 222      "default_max_tokens": 32000,
 223      "can_reason": true,
 224      "reasoning_levels": [
 225        "low",
 226        "medium",
 227        "high"
 228      ],
 229      "default_reasoning_effort": "medium",
 230      "supports_attachments": true
 231    },
 232    {
 233      "id": "anthropic/claude-opus-4.6",
 234      "name": "Anthropic: Claude Opus 4.6",
 235      "cost_per_1m_in": 5,
 236      "cost_per_1m_out": 25,
 237      "cost_per_1m_in_cached": 6.25,
 238      "cost_per_1m_out_cached": 0.5,
 239      "context_window": 1000000,
 240      "default_max_tokens": 64000,
 241      "can_reason": true,
 242      "reasoning_levels": [
 243        "low",
 244        "medium",
 245        "high"
 246      ],
 247      "default_reasoning_effort": "medium",
 248      "supports_attachments": true
 249    },
 250    {
 251      "id": "anthropic/claude-opus-4.6-fast",
 252      "name": "Anthropic: Claude Opus 4.6 (Fast)",
 253      "cost_per_1m_in": 30,
 254      "cost_per_1m_out": 150,
 255      "cost_per_1m_in_cached": 37.5,
 256      "cost_per_1m_out_cached": 3,
 257      "context_window": 1000000,
 258      "default_max_tokens": 64000,
 259      "can_reason": true,
 260      "reasoning_levels": [
 261        "low",
 262        "medium",
 263        "high"
 264      ],
 265      "default_reasoning_effort": "medium",
 266      "supports_attachments": true
 267    },
 268    {
 269      "id": "anthropic/claude-opus-4.7",
 270      "name": "Anthropic: Claude Opus 4.7",
 271      "cost_per_1m_in": 5,
 272      "cost_per_1m_out": 25,
 273      "cost_per_1m_in_cached": 6.25,
 274      "cost_per_1m_out_cached": 0.5,
 275      "context_window": 1000000,
 276      "default_max_tokens": 64000,
 277      "can_reason": true,
 278      "reasoning_levels": [
 279        "low",
 280        "medium",
 281        "high"
 282      ],
 283      "default_reasoning_effort": "medium",
 284      "supports_attachments": true
 285    },
 286    {
 287      "id": "anthropic/claude-sonnet-4",
 288      "name": "Anthropic: Claude Sonnet 4",
 289      "cost_per_1m_in": 3,
 290      "cost_per_1m_out": 15,
 291      "cost_per_1m_in_cached": 3.75,
 292      "cost_per_1m_out_cached": 0.3,
 293      "context_window": 1000000,
 294      "default_max_tokens": 32000,
 295      "can_reason": true,
 296      "reasoning_levels": [
 297        "low",
 298        "medium",
 299        "high"
 300      ],
 301      "default_reasoning_effort": "medium",
 302      "supports_attachments": true
 303    },
 304    {
 305      "id": "anthropic/claude-sonnet-4.5",
 306      "name": "Anthropic: Claude Sonnet 4.5",
 307      "cost_per_1m_in": 3,
 308      "cost_per_1m_out": 15,
 309      "cost_per_1m_in_cached": 3.75,
 310      "cost_per_1m_out_cached": 0.3,
 311      "context_window": 1000000,
 312      "default_max_tokens": 32000,
 313      "can_reason": true,
 314      "reasoning_levels": [
 315        "low",
 316        "medium",
 317        "high"
 318      ],
 319      "default_reasoning_effort": "medium",
 320      "supports_attachments": true
 321    },
 322    {
 323      "id": "anthropic/claude-sonnet-4.6",
 324      "name": "Anthropic: Claude Sonnet 4.6",
 325      "cost_per_1m_in": 3,
 326      "cost_per_1m_out": 15,
 327      "cost_per_1m_in_cached": 3.75,
 328      "cost_per_1m_out_cached": 0.3,
 329      "context_window": 1000000,
 330      "default_max_tokens": 64000,
 331      "can_reason": true,
 332      "reasoning_levels": [
 333        "low",
 334        "medium",
 335        "high"
 336      ],
 337      "default_reasoning_effort": "medium",
 338      "supports_attachments": true
 339    },
 340    {
 341      "id": "arcee-ai/trinity-large-preview",
 342      "name": "Arcee AI: Trinity Large Preview",
 343      "cost_per_1m_in": 0.15,
 344      "cost_per_1m_out": 0.45,
 345      "cost_per_1m_in_cached": 0,
 346      "cost_per_1m_out_cached": 0,
 347      "context_window": 131000,
 348      "default_max_tokens": 13100,
 349      "can_reason": false,
 350      "supports_attachments": false
 351    },
 352    {
 353      "id": "arcee-ai/trinity-large-thinking",
 354      "name": "Arcee AI: Trinity Large Thinking",
 355      "cost_per_1m_in": 0.22,
 356      "cost_per_1m_out": 0.85,
 357      "cost_per_1m_in_cached": 0,
 358      "cost_per_1m_out_cached": 0.06,
 359      "context_window": 262144,
 360      "default_max_tokens": 131072,
 361      "can_reason": true,
 362      "reasoning_levels": [
 363        "low",
 364        "medium",
 365        "high"
 366      ],
 367      "default_reasoning_effort": "medium",
 368      "supports_attachments": false
 369    },
 370    {
 371      "id": "arcee-ai/trinity-mini",
 372      "name": "Arcee AI: Trinity Mini",
 373      "cost_per_1m_in": 0.045,
 374      "cost_per_1m_out": 0.15,
 375      "cost_per_1m_in_cached": 0,
 376      "cost_per_1m_out_cached": 0,
 377      "context_window": 131072,
 378      "default_max_tokens": 65536,
 379      "can_reason": true,
 380      "reasoning_levels": [
 381        "low",
 382        "medium",
 383        "high"
 384      ],
 385      "default_reasoning_effort": "medium",
 386      "supports_attachments": false
 387    },
 388    {
 389      "id": "arcee-ai/virtuoso-large",
 390      "name": "Arcee AI: Virtuoso Large",
 391      "cost_per_1m_in": 0.75,
 392      "cost_per_1m_out": 1.2,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 131072,
 396      "default_max_tokens": 32000,
 397      "can_reason": false,
 398      "supports_attachments": false
 399    },
 400    {
 401      "id": "baidu/cobuddy:free",
 402      "name": "Baidu Qianfan: CoBuddy (free)",
 403      "cost_per_1m_in": 0,
 404      "cost_per_1m_out": 0,
 405      "cost_per_1m_in_cached": 0,
 406      "cost_per_1m_out_cached": 0,
 407      "context_window": 131072,
 408      "default_max_tokens": 32768,
 409      "can_reason": true,
 410      "reasoning_levels": [
 411        "low",
 412        "medium",
 413        "high"
 414      ],
 415      "default_reasoning_effort": "medium",
 416      "supports_attachments": false
 417    },
 418    {
 419      "id": "baidu/ernie-4.5-21b-a3b",
 420      "name": "Baidu: ERNIE 4.5 21B A3B",
 421      "cost_per_1m_in": 0.07,
 422      "cost_per_1m_out": 0.28,
 423      "cost_per_1m_in_cached": 0,
 424      "cost_per_1m_out_cached": 0,
 425      "context_window": 120000,
 426      "default_max_tokens": 4000,
 427      "can_reason": false,
 428      "supports_attachments": false
 429    },
 430    {
 431      "id": "baidu/ernie-4.5-vl-28b-a3b",
 432      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 433      "cost_per_1m_in": 0.14,
 434      "cost_per_1m_out": 0.56,
 435      "cost_per_1m_in_cached": 0,
 436      "cost_per_1m_out_cached": 0,
 437      "context_window": 30000,
 438      "default_max_tokens": 4000,
 439      "can_reason": true,
 440      "reasoning_levels": [
 441        "low",
 442        "medium",
 443        "high"
 444      ],
 445      "default_reasoning_effort": "medium",
 446      "supports_attachments": true
 447    },
 448    {
 449      "id": "bytedance-seed/seed-1.6",
 450      "name": "ByteDance Seed: Seed 1.6",
 451      "cost_per_1m_in": 0.25,
 452      "cost_per_1m_out": 2,
 453      "cost_per_1m_in_cached": 0,
 454      "cost_per_1m_out_cached": 0,
 455      "context_window": 262144,
 456      "default_max_tokens": 16384,
 457      "can_reason": true,
 458      "reasoning_levels": [
 459        "low",
 460        "medium",
 461        "high"
 462      ],
 463      "default_reasoning_effort": "medium",
 464      "supports_attachments": true
 465    },
 466    {
 467      "id": "bytedance-seed/seed-1.6-flash",
 468      "name": "ByteDance Seed: Seed 1.6 Flash",
 469      "cost_per_1m_in": 0.075,
 470      "cost_per_1m_out": 0.3,
 471      "cost_per_1m_in_cached": 0,
 472      "cost_per_1m_out_cached": 0,
 473      "context_window": 262144,
 474      "default_max_tokens": 16384,
 475      "can_reason": true,
 476      "reasoning_levels": [
 477        "low",
 478        "medium",
 479        "high"
 480      ],
 481      "default_reasoning_effort": "medium",
 482      "supports_attachments": true
 483    },
 484    {
 485      "id": "bytedance-seed/seed-2.0-lite",
 486      "name": "ByteDance Seed: Seed-2.0-Lite",
 487      "cost_per_1m_in": 0.25,
 488      "cost_per_1m_out": 2,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 262144,
 492      "default_max_tokens": 65536,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": true
 501    },
 502    {
 503      "id": "bytedance-seed/seed-2.0-mini",
 504      "name": "ByteDance Seed: Seed-2.0-Mini",
 505      "cost_per_1m_in": 0.1,
 506      "cost_per_1m_out": 0.4,
 507      "cost_per_1m_in_cached": 0,
 508      "cost_per_1m_out_cached": 0,
 509      "context_window": 262144,
 510      "default_max_tokens": 65536,
 511      "can_reason": true,
 512      "reasoning_levels": [
 513        "low",
 514        "medium",
 515        "high"
 516      ],
 517      "default_reasoning_effort": "medium",
 518      "supports_attachments": true
 519    },
 520    {
 521      "id": "cohere/command-r-08-2024",
 522      "name": "Cohere: Command R (08-2024)",
 523      "cost_per_1m_in": 0.15,
 524      "cost_per_1m_out": 0.6,
 525      "cost_per_1m_in_cached": 0,
 526      "cost_per_1m_out_cached": 0,
 527      "context_window": 128000,
 528      "default_max_tokens": 2000,
 529      "can_reason": false,
 530      "supports_attachments": false
 531    },
 532    {
 533      "id": "cohere/command-r-plus-08-2024",
 534      "name": "Cohere: Command R+ (08-2024)",
 535      "cost_per_1m_in": 2.5,
 536      "cost_per_1m_out": 10,
 537      "cost_per_1m_in_cached": 0,
 538      "cost_per_1m_out_cached": 0,
 539      "context_window": 128000,
 540      "default_max_tokens": 2000,
 541      "can_reason": false,
 542      "supports_attachments": false
 543    },
 544    {
 545      "id": "deepseek/deepseek-chat",
 546      "name": "DeepSeek: DeepSeek V3",
 547      "cost_per_1m_in": 0.32,
 548      "cost_per_1m_out": 0.89,
 549      "cost_per_1m_in_cached": 0,
 550      "cost_per_1m_out_cached": 0,
 551      "context_window": 163840,
 552      "default_max_tokens": 8192,
 553      "can_reason": false,
 554      "supports_attachments": false
 555    },
 556    {
 557      "id": "deepseek/deepseek-chat-v3-0324",
 558      "name": "DeepSeek: DeepSeek V3 0324",
 559      "cost_per_1m_in": 0.27,
 560      "cost_per_1m_out": 1.12,
 561      "cost_per_1m_in_cached": 0,
 562      "cost_per_1m_out_cached": 0.135,
 563      "context_window": 163840,
 564      "default_max_tokens": 81920,
 565      "can_reason": false,
 566      "supports_attachments": false
 567    },
 568    {
 569      "id": "deepseek/deepseek-chat-v3.1",
 570      "name": "DeepSeek: DeepSeek V3.1",
 571      "cost_per_1m_in": 0.21,
 572      "cost_per_1m_out": 0.79,
 573      "cost_per_1m_in_cached": 0,
 574      "cost_per_1m_out_cached": 0.13,
 575      "context_window": 163840,
 576      "default_max_tokens": 16384,
 577      "can_reason": true,
 578      "reasoning_levels": [
 579        "low",
 580        "medium",
 581        "high"
 582      ],
 583      "default_reasoning_effort": "medium",
 584      "supports_attachments": false
 585    },
 586    {
 587      "id": "deepseek/deepseek-v3.1-terminus",
 588      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 589      "cost_per_1m_in": 0.27,
 590      "cost_per_1m_out": 1,
 591      "cost_per_1m_in_cached": 0,
 592      "cost_per_1m_out_cached": 0,
 593      "context_window": 163840,
 594      "default_max_tokens": 81920,
 595      "can_reason": true,
 596      "reasoning_levels": [
 597        "low",
 598        "medium",
 599        "high"
 600      ],
 601      "default_reasoning_effort": "medium",
 602      "supports_attachments": false
 603    },
 604    {
 605      "id": "deepseek/deepseek-v3.2",
 606      "name": "DeepSeek: DeepSeek V3.2",
 607      "cost_per_1m_in": 0.26,
 608      "cost_per_1m_out": 0.38,
 609      "cost_per_1m_in_cached": 0,
 610      "cost_per_1m_out_cached": 0.13,
 611      "context_window": 163840,
 612      "default_max_tokens": 8192,
 613      "can_reason": true,
 614      "reasoning_levels": [
 615        "low",
 616        "medium",
 617        "high"
 618      ],
 619      "default_reasoning_effort": "medium",
 620      "supports_attachments": false
 621    },
 622    {
 623      "id": "deepseek/deepseek-v3.2-exp",
 624      "name": "DeepSeek: DeepSeek V3.2 Exp",
 625      "cost_per_1m_in": 0.27,
 626      "cost_per_1m_out": 0.41,
 627      "cost_per_1m_in_cached": 0,
 628      "cost_per_1m_out_cached": 0.27,
 629      "context_window": 163840,
 630      "default_max_tokens": 81920,
 631      "can_reason": true,
 632      "reasoning_levels": [
 633        "low",
 634        "medium",
 635        "high"
 636      ],
 637      "default_reasoning_effort": "medium",
 638      "supports_attachments": false
 639    },
 640    {
 641      "id": "deepseek/deepseek-v4-flash",
 642      "name": "DeepSeek: DeepSeek V4 Flash",
 643      "cost_per_1m_in": 0.14,
 644      "cost_per_1m_out": 0.28,
 645      "cost_per_1m_in_cached": 0,
 646      "cost_per_1m_out_cached": 0.0028,
 647      "context_window": 1048576,
 648      "default_max_tokens": 192000,
 649      "can_reason": true,
 650      "reasoning_levels": [
 651        "low",
 652        "medium",
 653        "high"
 654      ],
 655      "default_reasoning_effort": "medium",
 656      "supports_attachments": false
 657    },
 658    {
 659      "id": "deepseek/deepseek-v4-pro",
 660      "name": "DeepSeek: DeepSeek V4 Pro",
 661      "cost_per_1m_in": 0.435,
 662      "cost_per_1m_out": 0.87,
 663      "cost_per_1m_in_cached": 0,
 664      "cost_per_1m_out_cached": 0.00363,
 665      "context_window": 1048576,
 666      "default_max_tokens": 192000,
 667      "can_reason": true,
 668      "reasoning_levels": [
 669        "low",
 670        "medium",
 671        "high"
 672      ],
 673      "default_reasoning_effort": "medium",
 674      "supports_attachments": false
 675    },
 676    {
 677      "id": "deepseek/deepseek-r1",
 678      "name": "DeepSeek: R1",
 679      "cost_per_1m_in": 0.7,
 680      "cost_per_1m_out": 2.5,
 681      "cost_per_1m_in_cached": 0,
 682      "cost_per_1m_out_cached": 0,
 683      "context_window": 64000,
 684      "default_max_tokens": 8000,
 685      "can_reason": true,
 686      "reasoning_levels": [
 687        "low",
 688        "medium",
 689        "high"
 690      ],
 691      "default_reasoning_effort": "medium",
 692      "supports_attachments": false
 693    },
 694    {
 695      "id": "deepseek/deepseek-r1-0528",
 696      "name": "DeepSeek: R1 0528",
 697      "cost_per_1m_in": 0.5,
 698      "cost_per_1m_out": 2.18,
 699      "cost_per_1m_in_cached": 0,
 700      "cost_per_1m_out_cached": 0,
 701      "context_window": 163840,
 702      "default_max_tokens": 81920,
 703      "can_reason": true,
 704      "reasoning_levels": [
 705        "low",
 706        "medium",
 707        "high"
 708      ],
 709      "default_reasoning_effort": "medium",
 710      "supports_attachments": false
 711    },
 712    {
 713      "id": "essentialai/rnj-1-instruct",
 714      "name": "EssentialAI: Rnj 1 Instruct",
 715      "cost_per_1m_in": 0.15,
 716      "cost_per_1m_out": 0.15,
 717      "cost_per_1m_in_cached": 0,
 718      "cost_per_1m_out_cached": 0,
 719      "context_window": 32768,
 720      "default_max_tokens": 3276,
 721      "can_reason": false,
 722      "supports_attachments": false
 723    },
 724    {
 725      "id": "google/gemini-2.0-flash-001",
 726      "name": "Google: Gemini 2.0 Flash",
 727      "cost_per_1m_in": 0.1,
 728      "cost_per_1m_out": 0.4,
 729      "cost_per_1m_in_cached": 0.08333,
 730      "cost_per_1m_out_cached": 0.025,
 731      "context_window": 1048576,
 732      "default_max_tokens": 4096,
 733      "can_reason": false,
 734      "supports_attachments": true
 735    },
 736    {
 737      "id": "google/gemini-2.0-flash-lite-001",
 738      "name": "Google: Gemini 2.0 Flash Lite",
 739      "cost_per_1m_in": 0.075,
 740      "cost_per_1m_out": 0.3,
 741      "cost_per_1m_in_cached": 0,
 742      "cost_per_1m_out_cached": 0,
 743      "context_window": 1048576,
 744      "default_max_tokens": 4096,
 745      "can_reason": false,
 746      "supports_attachments": true
 747    },
 748    {
 749      "id": "google/gemini-2.5-flash",
 750      "name": "Google: Gemini 2.5 Flash",
 751      "cost_per_1m_in": 0.3,
 752      "cost_per_1m_out": 2.5,
 753      "cost_per_1m_in_cached": 0.08333,
 754      "cost_per_1m_out_cached": 0.03,
 755      "context_window": 1048576,
 756      "default_max_tokens": 32767,
 757      "can_reason": true,
 758      "reasoning_levels": [
 759        "low",
 760        "medium",
 761        "high"
 762      ],
 763      "default_reasoning_effort": "medium",
 764      "supports_attachments": true
 765    },
 766    {
 767      "id": "google/gemini-2.5-flash-lite",
 768      "name": "Google: Gemini 2.5 Flash Lite",
 769      "cost_per_1m_in": 0.1,
 770      "cost_per_1m_out": 0.4,
 771      "cost_per_1m_in_cached": 0.08333,
 772      "cost_per_1m_out_cached": 0.01,
 773      "context_window": 1048576,
 774      "default_max_tokens": 32767,
 775      "can_reason": true,
 776      "reasoning_levels": [
 777        "low",
 778        "medium",
 779        "high"
 780      ],
 781      "default_reasoning_effort": "medium",
 782      "supports_attachments": true
 783    },
 784    {
 785      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 786      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 787      "cost_per_1m_in": 0.1,
 788      "cost_per_1m_out": 0.4,
 789      "cost_per_1m_in_cached": 0.08333,
 790      "cost_per_1m_out_cached": 0.01,
 791      "context_window": 1048576,
 792      "default_max_tokens": 32767,
 793      "can_reason": true,
 794      "reasoning_levels": [
 795        "low",
 796        "medium",
 797        "high"
 798      ],
 799      "default_reasoning_effort": "medium",
 800      "supports_attachments": true
 801    },
 802    {
 803      "id": "google/gemini-2.5-pro",
 804      "name": "Google: Gemini 2.5 Pro",
 805      "cost_per_1m_in": 1.25,
 806      "cost_per_1m_out": 10,
 807      "cost_per_1m_in_cached": 0.375,
 808      "cost_per_1m_out_cached": 0.125,
 809      "context_window": 1048576,
 810      "default_max_tokens": 32768,
 811      "can_reason": true,
 812      "reasoning_levels": [
 813        "low",
 814        "medium",
 815        "high"
 816      ],
 817      "default_reasoning_effort": "medium",
 818      "supports_attachments": true
 819    },
 820    {
 821      "id": "google/gemini-2.5-pro-preview-05-06",
 822      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 823      "cost_per_1m_in": 1.25,
 824      "cost_per_1m_out": 10,
 825      "cost_per_1m_in_cached": 0.375,
 826      "cost_per_1m_out_cached": 0.125,
 827      "context_window": 1048576,
 828      "default_max_tokens": 32768,
 829      "can_reason": true,
 830      "reasoning_levels": [
 831        "low",
 832        "medium",
 833        "high"
 834      ],
 835      "default_reasoning_effort": "medium",
 836      "supports_attachments": true
 837    },
 838    {
 839      "id": "google/gemini-2.5-pro-preview",
 840      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 841      "cost_per_1m_in": 1.25,
 842      "cost_per_1m_out": 10,
 843      "cost_per_1m_in_cached": 0.375,
 844      "cost_per_1m_out_cached": 0.125,
 845      "context_window": 1048576,
 846      "default_max_tokens": 32768,
 847      "can_reason": true,
 848      "reasoning_levels": [
 849        "low",
 850        "medium",
 851        "high"
 852      ],
 853      "default_reasoning_effort": "medium",
 854      "supports_attachments": true
 855    },
 856    {
 857      "id": "google/gemini-3-flash-preview",
 858      "name": "Google: Gemini 3 Flash Preview",
 859      "cost_per_1m_in": 0.5,
 860      "cost_per_1m_out": 3,
 861      "cost_per_1m_in_cached": 0.08333,
 862      "cost_per_1m_out_cached": 0.05,
 863      "context_window": 1048576,
 864      "default_max_tokens": 32768,
 865      "can_reason": true,
 866      "reasoning_levels": [
 867        "low",
 868        "medium",
 869        "high"
 870      ],
 871      "default_reasoning_effort": "medium",
 872      "supports_attachments": true
 873    },
 874    {
 875      "id": "google/gemini-3.1-flash-lite-preview",
 876      "name": "Google: Gemini 3.1 Flash Lite Preview",
 877      "cost_per_1m_in": 0.25,
 878      "cost_per_1m_out": 1.5,
 879      "cost_per_1m_in_cached": 0.08333,
 880      "cost_per_1m_out_cached": 0.025,
 881      "context_window": 1048576,
 882      "default_max_tokens": 32768,
 883      "can_reason": true,
 884      "reasoning_levels": [
 885        "low",
 886        "medium",
 887        "high"
 888      ],
 889      "default_reasoning_effort": "medium",
 890      "supports_attachments": true
 891    },
 892    {
 893      "id": "google/gemini-3.1-pro-preview",
 894      "name": "Google: Gemini 3.1 Pro Preview",
 895      "cost_per_1m_in": 2,
 896      "cost_per_1m_out": 12,
 897      "cost_per_1m_in_cached": 0.375,
 898      "cost_per_1m_out_cached": 0.2,
 899      "context_window": 1048576,
 900      "default_max_tokens": 32768,
 901      "can_reason": true,
 902      "reasoning_levels": [
 903        "low",
 904        "medium",
 905        "high"
 906      ],
 907      "default_reasoning_effort": "medium",
 908      "supports_attachments": true
 909    },
 910    {
 911      "id": "google/gemini-3.1-pro-preview-customtools",
 912      "name": "Google: Gemini 3.1 Pro Preview Custom Tools",
 913      "cost_per_1m_in": 2,
 914      "cost_per_1m_out": 12,
 915      "cost_per_1m_in_cached": 0.375,
 916      "cost_per_1m_out_cached": 0.2,
 917      "context_window": 1048576,
 918      "default_max_tokens": 32768,
 919      "can_reason": true,
 920      "reasoning_levels": [
 921        "low",
 922        "medium",
 923        "high"
 924      ],
 925      "default_reasoning_effort": "medium",
 926      "supports_attachments": true
 927    },
 928    {
 929      "id": "google/gemma-3-12b-it",
 930      "name": "Google: Gemma 3 12B",
 931      "cost_per_1m_in": 0.04,
 932      "cost_per_1m_out": 0.13,
 933      "cost_per_1m_in_cached": 0,
 934      "cost_per_1m_out_cached": 0,
 935      "context_window": 131072,
 936      "default_max_tokens": 8192,
 937      "can_reason": false,
 938      "supports_attachments": true
 939    },
 940    {
 941      "id": "google/gemma-3-27b-it",
 942      "name": "Google: Gemma 3 27B",
 943      "cost_per_1m_in": 0.08,
 944      "cost_per_1m_out": 0.16,
 945      "cost_per_1m_in_cached": 0,
 946      "cost_per_1m_out_cached": 0,
 947      "context_window": 131072,
 948      "default_max_tokens": 8192,
 949      "can_reason": false,
 950      "supports_attachments": true
 951    },
 952    {
 953      "id": "google/gemma-4-26b-a4b-it",
  954      "name": "Google: Gemma 4 26B A4B",
 955      "cost_per_1m_in": 0.15,
 956      "cost_per_1m_out": 0.5,
 957      "cost_per_1m_in_cached": 0,
 958      "cost_per_1m_out_cached": 0.15,
 959      "context_window": 262144,
 960      "default_max_tokens": 131072,
 961      "can_reason": true,
 962      "reasoning_levels": [
 963        "low",
 964        "medium",
 965        "high"
 966      ],
 967      "default_reasoning_effort": "medium",
 968      "supports_attachments": true
 969    },
 970    {
 971      "id": "google/gemma-4-26b-a4b-it:free",
  972      "name": "Google: Gemma 4 26B A4B (free)",
 973      "cost_per_1m_in": 0,
 974      "cost_per_1m_out": 0,
 975      "cost_per_1m_in_cached": 0,
 976      "cost_per_1m_out_cached": 0,
 977      "context_window": 262144,
 978      "default_max_tokens": 16384,
 979      "can_reason": true,
 980      "reasoning_levels": [
 981        "low",
 982        "medium",
 983        "high"
 984      ],
 985      "default_reasoning_effort": "medium",
 986      "supports_attachments": true
 987    },
 988    {
 989      "id": "google/gemma-4-31b-it",
 990      "name": "Google: Gemma 4 31B",
 991      "cost_per_1m_in": 0.13,
 992      "cost_per_1m_out": 0.38,
 993      "cost_per_1m_in_cached": 0,
 994      "cost_per_1m_out_cached": 0,
 995      "context_window": 262144,
 996      "default_max_tokens": 8192,
 997      "can_reason": true,
 998      "reasoning_levels": [
 999        "low",
1000        "medium",
1001        "high"
1002      ],
1003      "default_reasoning_effort": "medium",
1004      "supports_attachments": true
1005    },
1006    {
1007      "id": "google/gemma-4-31b-it:free",
1008      "name": "Google: Gemma 4 31B (free)",
1009      "cost_per_1m_in": 0,
1010      "cost_per_1m_out": 0,
1011      "cost_per_1m_in_cached": 0,
1012      "cost_per_1m_out_cached": 0,
1013      "context_window": 262144,
1014      "default_max_tokens": 16384,
1015      "can_reason": true,
1016      "reasoning_levels": [
1017        "low",
1018        "medium",
1019        "high"
1020      ],
1021      "default_reasoning_effort": "medium",
1022      "supports_attachments": true
1023    },
1024    {
1025      "id": "ibm-granite/granite-4.1-8b",
1026      "name": "IBM: Granite 4.1 8B",
1027      "cost_per_1m_in": 0.05,
1028      "cost_per_1m_out": 0.1,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0.05,
1031      "context_window": 131072,
1032      "default_max_tokens": 65536,
1033      "can_reason": false,
1034      "supports_attachments": false
1035    },
1036    {
1037      "id": "inception/mercury-2",
1038      "name": "Inception: Mercury 2",
1039      "cost_per_1m_in": 0.25,
1040      "cost_per_1m_out": 0.75,
1041      "cost_per_1m_in_cached": 0,
1042      "cost_per_1m_out_cached": 0.025,
1043      "context_window": 128000,
1044      "default_max_tokens": 25000,
1045      "can_reason": true,
1046      "reasoning_levels": [
1047        "low",
1048        "medium",
1049        "high"
1050      ],
1051      "default_reasoning_effort": "medium",
1052      "supports_attachments": false
1053    },
1054    {
1055      "id": "kwaipilot/kat-coder-pro-v2",
1056      "name": "Kwaipilot: KAT-Coder-Pro V2",
1057      "cost_per_1m_in": 0.3,
1058      "cost_per_1m_out": 1.2,
1059      "cost_per_1m_in_cached": 0,
1060      "cost_per_1m_out_cached": 0.06,
1061      "context_window": 256000,
1062      "default_max_tokens": 40000,
1063      "can_reason": false,
1064      "supports_attachments": false
1065    },
1066    {
1067      "id": "meta-llama/llama-3.1-70b-instruct",
1068      "name": "Meta: Llama 3.1 70B Instruct",
1069      "cost_per_1m_in": 0.4,
1070      "cost_per_1m_out": 0.4,
1071      "cost_per_1m_in_cached": 0,
1072      "cost_per_1m_out_cached": 0,
1073      "context_window": 131072,
1074      "default_max_tokens": 8192,
1075      "can_reason": false,
1076      "supports_attachments": false
1077    },
1078    {
1079      "id": "meta-llama/llama-3.3-70b-instruct",
1080      "name": "Meta: Llama 3.3 70B Instruct",
1081      "cost_per_1m_in": 0.6,
1082      "cost_per_1m_out": 0.6,
1083      "cost_per_1m_in_cached": 0,
1084      "cost_per_1m_out_cached": 0.6,
1085      "context_window": 131072,
1086      "default_max_tokens": 65536,
1087      "can_reason": false,
1088      "supports_attachments": false
1089    },
1090    {
1091      "id": "meta-llama/llama-3.3-70b-instruct:free",
1092      "name": "Meta: Llama 3.3 70B Instruct (free)",
1093      "cost_per_1m_in": 0,
1094      "cost_per_1m_out": 0,
1095      "cost_per_1m_in_cached": 0,
1096      "cost_per_1m_out_cached": 0,
1097      "context_window": 65536,
1098      "default_max_tokens": 6553,
1099      "can_reason": false,
1100      "supports_attachments": false
1101    },
1102    {
1103      "id": "meta-llama/llama-4-scout",
1104      "name": "Meta: Llama 4 Scout",
1105      "cost_per_1m_in": 0.25,
1106      "cost_per_1m_out": 0.7,
1107      "cost_per_1m_in_cached": 0,
1108      "cost_per_1m_out_cached": 0,
1109      "context_window": 1310720,
1110      "default_max_tokens": 4096,
1111      "can_reason": false,
1112      "supports_attachments": true
1113    },
1114    {
1115      "id": "minimax/minimax-m2",
1116      "name": "MiniMax: MiniMax M2",
1117      "cost_per_1m_in": 0.255,
1118      "cost_per_1m_out": 1,
1119      "cost_per_1m_in_cached": 0,
1120      "cost_per_1m_out_cached": 0.03,
1121      "context_window": 196608,
1122      "default_max_tokens": 98304,
1123      "can_reason": true,
1124      "reasoning_levels": [
1125        "low",
1126        "medium",
1127        "high"
1128      ],
1129      "default_reasoning_effort": "medium",
1130      "supports_attachments": false
1131    },
1132    {
1133      "id": "minimax/minimax-m2.1",
1134      "name": "MiniMax: MiniMax M2.1",
1135      "cost_per_1m_in": 0.3,
1136      "cost_per_1m_out": 1.2,
1137      "cost_per_1m_in_cached": 0,
1138      "cost_per_1m_out_cached": 0.03,
1139      "context_window": 204800,
1140      "default_max_tokens": 65536,
1141      "can_reason": true,
1142      "reasoning_levels": [
1143        "low",
1144        "medium",
1145        "high"
1146      ],
1147      "default_reasoning_effort": "medium",
1148      "supports_attachments": false
1149    },
1150    {
1151      "id": "minimax/minimax-m2.5",
1152      "name": "MiniMax: MiniMax M2.5",
1153      "cost_per_1m_in": 0.3,
1154      "cost_per_1m_out": 1.2,
1155      "cost_per_1m_in_cached": 0,
1156      "cost_per_1m_out_cached": 0.03,
1157      "context_window": 204800,
1158      "default_max_tokens": 65550,
1159      "can_reason": true,
1160      "reasoning_levels": [
1161        "low",
1162        "medium",
1163        "high"
1164      ],
1165      "default_reasoning_effort": "medium",
1166      "supports_attachments": false
1167    },
1168    {
1169      "id": "minimax/minimax-m2.5:free",
1170      "name": "MiniMax: MiniMax M2.5 (free)",
1171      "cost_per_1m_in": 0,
1172      "cost_per_1m_out": 0,
1173      "cost_per_1m_in_cached": 0,
1174      "cost_per_1m_out_cached": 0,
1175      "context_window": 196608,
1176      "default_max_tokens": 4096,
1177      "can_reason": true,
1178      "reasoning_levels": [
1179        "low",
1180        "medium",
1181        "high"
1182      ],
1183      "default_reasoning_effort": "medium",
1184      "supports_attachments": false
1185    },
1186    {
1187      "id": "minimax/minimax-m2.7",
1188      "name": "MiniMax: MiniMax M2.7",
1189      "cost_per_1m_in": 0.3,
1190      "cost_per_1m_out": 1.2,
1191      "cost_per_1m_in_cached": 0,
1192      "cost_per_1m_out_cached": 0.06,
1193      "context_window": 204800,
1194      "default_max_tokens": 65536,
1195      "can_reason": true,
1196      "reasoning_levels": [
1197        "low",
1198        "medium",
1199        "high"
1200      ],
1201      "default_reasoning_effort": "medium",
1202      "supports_attachments": false
1203    },
1204    {
1205      "id": "mistralai/mistral-large",
1206      "name": "Mistral Large",
1207      "cost_per_1m_in": 2,
1208      "cost_per_1m_out": 6,
1209      "cost_per_1m_in_cached": 0,
1210      "cost_per_1m_out_cached": 0.2,
1211      "context_window": 128000,
1212      "default_max_tokens": 12800,
1213      "can_reason": false,
1214      "supports_attachments": false
1215    },
1216    {
1217      "id": "mistralai/mistral-large-2407",
1218      "name": "Mistral Large 2407",
1219      "cost_per_1m_in": 2,
1220      "cost_per_1m_out": 6,
1221      "cost_per_1m_in_cached": 0,
1222      "cost_per_1m_out_cached": 0.2,
1223      "context_window": 131072,
1224      "default_max_tokens": 13107,
1225      "can_reason": false,
1226      "supports_attachments": false
1227    },
1228    {
1229      "id": "mistralai/mistral-large-2411",
1230      "name": "Mistral Large 2411",
1231      "cost_per_1m_in": 2,
1232      "cost_per_1m_out": 6,
1233      "cost_per_1m_in_cached": 0,
1234      "cost_per_1m_out_cached": 0.2,
1235      "context_window": 131072,
1236      "default_max_tokens": 13107,
1237      "can_reason": false,
1238      "supports_attachments": false
1239    },
1240    {
1241      "id": "mistralai/codestral-2508",
1242      "name": "Mistral: Codestral 2508",
1243      "cost_per_1m_in": 0.3,
1244      "cost_per_1m_out": 0.9,
1245      "cost_per_1m_in_cached": 0,
1246      "cost_per_1m_out_cached": 0.03,
1247      "context_window": 256000,
1248      "default_max_tokens": 25600,
1249      "can_reason": false,
1250      "supports_attachments": false
1251    },
1252    {
1253      "id": "mistralai/devstral-2512",
1254      "name": "Mistral: Devstral 2 2512",
1255      "cost_per_1m_in": 0.4,
1256      "cost_per_1m_out": 2,
1257      "cost_per_1m_in_cached": 0,
1258      "cost_per_1m_out_cached": 0.04,
1259      "context_window": 262144,
1260      "default_max_tokens": 26214,
1261      "can_reason": false,
1262      "supports_attachments": false
1263    },
1264    {
1265      "id": "mistralai/devstral-medium",
1266      "name": "Mistral: Devstral Medium",
1267      "cost_per_1m_in": 0.4,
1268      "cost_per_1m_out": 2,
1269      "cost_per_1m_in_cached": 0,
1270      "cost_per_1m_out_cached": 0.04,
1271      "context_window": 131072,
1272      "default_max_tokens": 13107,
1273      "can_reason": false,
1274      "supports_attachments": false
1275    },
1276    {
1277      "id": "mistralai/devstral-small",
1278      "name": "Mistral: Devstral Small 1.1",
1279      "cost_per_1m_in": 0.1,
1280      "cost_per_1m_out": 0.3,
1281      "cost_per_1m_in_cached": 0,
1282      "cost_per_1m_out_cached": 0.01,
1283      "context_window": 131072,
1284      "default_max_tokens": 13107,
1285      "can_reason": false,
1286      "supports_attachments": false
1287    },
1288    {
1289      "id": "mistralai/ministral-14b-2512",
1290      "name": "Mistral: Ministral 3 14B 2512",
1291      "cost_per_1m_in": 0.35,
1292      "cost_per_1m_out": 0.35,
1293      "cost_per_1m_in_cached": 0,
1294      "cost_per_1m_out_cached": 0,
1295      "context_window": 262144,
1296      "default_max_tokens": 131072,
1297      "can_reason": false,
1298      "supports_attachments": true
1299    },
1300    {
1301      "id": "mistralai/ministral-3b-2512",
1302      "name": "Mistral: Ministral 3 3B 2512",
1303      "cost_per_1m_in": 0.15,
1304      "cost_per_1m_out": 0.15,
1305      "cost_per_1m_in_cached": 0,
1306      "cost_per_1m_out_cached": 0,
1307      "context_window": 131072,
1308      "default_max_tokens": 65536,
1309      "can_reason": false,
1310      "supports_attachments": true
1311    },
1312    {
1313      "id": "mistralai/ministral-8b-2512",
1314      "name": "Mistral: Ministral 3 8B 2512",
1315      "cost_per_1m_in": 0.3,
1316      "cost_per_1m_out": 0.3,
1317      "cost_per_1m_in_cached": 0,
1318      "cost_per_1m_out_cached": 0,
1319      "context_window": 262144,
1320      "default_max_tokens": 131072,
1321      "can_reason": false,
1322      "supports_attachments": true
1323    },
1324    {
1325      "id": "mistralai/mistral-large-2512",
1326      "name": "Mistral: Mistral Large 3 2512",
1327      "cost_per_1m_in": 0.5,
1328      "cost_per_1m_out": 1.5,
1329      "cost_per_1m_in_cached": 0,
1330      "cost_per_1m_out_cached": 0.05,
1331      "context_window": 262144,
1332      "default_max_tokens": 26214,
1333      "can_reason": false,
1334      "supports_attachments": true
1335    },
1336    {
1337      "id": "mistralai/mistral-medium-3",
1338      "name": "Mistral: Mistral Medium 3",
1339      "cost_per_1m_in": 0.4,
1340      "cost_per_1m_out": 2,
1341      "cost_per_1m_in_cached": 0,
1342      "cost_per_1m_out_cached": 0.04,
1343      "context_window": 131072,
1344      "default_max_tokens": 13107,
1345      "can_reason": false,
1346      "supports_attachments": true
1347    },
1348    {
1349      "id": "mistralai/mistral-medium-3.1",
1350      "name": "Mistral: Mistral Medium 3.1",
1351      "cost_per_1m_in": 0.4,
1352      "cost_per_1m_out": 2,
1353      "cost_per_1m_in_cached": 0,
1354      "cost_per_1m_out_cached": 0.04,
1355      "context_window": 131072,
1356      "default_max_tokens": 13107,
1357      "can_reason": false,
1358      "supports_attachments": true
1359    },
1360    {
1361      "id": "mistralai/mistral-medium-3-5",
1362      "name": "Mistral: Mistral Medium 3.5",
1363      "cost_per_1m_in": 1.5,
1364      "cost_per_1m_out": 7.5,
1365      "cost_per_1m_in_cached": 0,
1366      "cost_per_1m_out_cached": 0,
1367      "context_window": 262144,
1368      "default_max_tokens": 26214,
1369      "can_reason": true,
1370      "reasoning_levels": [
1371        "low",
1372        "medium",
1373        "high"
1374      ],
1375      "default_reasoning_effort": "medium",
1376      "supports_attachments": true
1377    },
1378    {
1379      "id": "mistralai/mistral-nemo",
1380      "name": "Mistral: Mistral Nemo",
1381      "cost_per_1m_in": 0.15,
1382      "cost_per_1m_out": 0.15,
1383      "cost_per_1m_in_cached": 0,
1384      "cost_per_1m_out_cached": 0.015,
1385      "context_window": 131072,
1386      "default_max_tokens": 13107,
1387      "can_reason": false,
1388      "supports_attachments": false
1389    },
1390    {
1391      "id": "mistralai/mistral-small-3.2-24b-instruct",
1392      "name": "Mistral: Mistral Small 3.2 24B",
1393      "cost_per_1m_in": 0.09375,
1394      "cost_per_1m_out": 0.25,
1395      "cost_per_1m_in_cached": 0,
1396      "cost_per_1m_out_cached": 0,
1397      "context_window": 256000,
1398      "default_max_tokens": 8192,
1399      "can_reason": false,
1400      "supports_attachments": true
1401    },
1402    {
1403      "id": "mistralai/mistral-small-2603",
1404      "name": "Mistral: Mistral Small 4",
1405      "cost_per_1m_in": 0.15,
1406      "cost_per_1m_out": 0.6,
1407      "cost_per_1m_in_cached": 0,
1408      "cost_per_1m_out_cached": 0.015,
1409      "context_window": 262144,
1410      "default_max_tokens": 26214,
1411      "can_reason": true,
1412      "reasoning_levels": [
1413        "low",
1414        "medium",
1415        "high"
1416      ],
1417      "default_reasoning_effort": "medium",
1418      "supports_attachments": true
1419    },
1420    {
1421      "id": "mistralai/mixtral-8x22b-instruct",
1422      "name": "Mistral: Mixtral 8x22B Instruct",
1423      "cost_per_1m_in": 2,
1424      "cost_per_1m_out": 6,
1425      "cost_per_1m_in_cached": 0,
1426      "cost_per_1m_out_cached": 0.2,
1427      "context_window": 65536,
1428      "default_max_tokens": 6553,
1429      "can_reason": false,
1430      "supports_attachments": false
1431    },
1432    {
1433      "id": "mistralai/mixtral-8x7b-instruct",
1434      "name": "Mistral: Mixtral 8x7B Instruct",
1435      "cost_per_1m_in": 0.54,
1436      "cost_per_1m_out": 0.54,
1437      "cost_per_1m_in_cached": 0,
1438      "cost_per_1m_out_cached": 0,
1439      "context_window": 32768,
1440      "default_max_tokens": 8192,
1441      "can_reason": false,
1442      "supports_attachments": false
1443    },
1444    {
1445      "id": "mistralai/pixtral-large-2411",
1446      "name": "Mistral: Pixtral Large 2411",
1447      "cost_per_1m_in": 2,
1448      "cost_per_1m_out": 6,
1449      "cost_per_1m_in_cached": 0,
1450      "cost_per_1m_out_cached": 0.2,
1451      "context_window": 131072,
1452      "default_max_tokens": 13107,
1453      "can_reason": false,
1454      "supports_attachments": true
1455    },
1456    {
1457      "id": "mistralai/mistral-saba",
1458      "name": "Mistral: Saba",
1459      "cost_per_1m_in": 0.2,
1460      "cost_per_1m_out": 0.6,
1461      "cost_per_1m_in_cached": 0,
1462      "cost_per_1m_out_cached": 0.02,
1463      "context_window": 32768,
1464      "default_max_tokens": 3276,
1465      "can_reason": false,
1466      "supports_attachments": false
1467    },
1468    {
1469      "id": "mistralai/voxtral-small-24b-2507",
1470      "name": "Mistral: Voxtral Small 24B 2507",
1471      "cost_per_1m_in": 0.1,
1472      "cost_per_1m_out": 0.3,
1473      "cost_per_1m_in_cached": 0,
1474      "cost_per_1m_out_cached": 0.01,
1475      "context_window": 32000,
1476      "default_max_tokens": 3200,
1477      "can_reason": false,
1478      "supports_attachments": false
1479    },
1480    {
1481      "id": "moonshotai/kimi-k2",
1482      "name": "MoonshotAI: Kimi K2 0711",
1483      "cost_per_1m_in": 0.57,
1484      "cost_per_1m_out": 2.3,
1485      "cost_per_1m_in_cached": 0,
1486      "cost_per_1m_out_cached": 0,
1487      "context_window": 131072,
1488      "default_max_tokens": 16384,
1489      "can_reason": false,
1490      "supports_attachments": false
1491    },
1492    {
1493      "id": "moonshotai/kimi-k2-0905",
1494      "name": "MoonshotAI: Kimi K2 0905",
1495      "cost_per_1m_in": 0.6,
1496      "cost_per_1m_out": 2.5,
1497      "cost_per_1m_in_cached": 0,
1498      "cost_per_1m_out_cached": 0,
1499      "context_window": 262144,
1500      "default_max_tokens": 131072,
1501      "can_reason": false,
1502      "supports_attachments": false
1503    },
1504    {
1505      "id": "moonshotai/kimi-k2-thinking",
1506      "name": "MoonshotAI: Kimi K2 Thinking",
1507      "cost_per_1m_in": 0.6,
1508      "cost_per_1m_out": 2.5,
1509      "cost_per_1m_in_cached": 0,
1510      "cost_per_1m_out_cached": 0.15,
1511      "context_window": 262144,
1512      "default_max_tokens": 131072,
1513      "can_reason": true,
1514      "reasoning_levels": [
1515        "low",
1516        "medium",
1517        "high"
1518      ],
1519      "default_reasoning_effort": "medium",
1520      "supports_attachments": false
1521    },
1522    {
1523      "id": "moonshotai/kimi-k2.5",
1524      "name": "MoonshotAI: Kimi K2.5",
1525      "cost_per_1m_in": 0.45,
1526      "cost_per_1m_out": 2.25,
1527      "cost_per_1m_in_cached": 0,
1528      "cost_per_1m_out_cached": 0.07,
1529      "context_window": 262144,
1530      "default_max_tokens": 32000,
1531      "can_reason": true,
1532      "reasoning_levels": [
1533        "low",
1534        "medium",
1535        "high"
1536      ],
1537      "default_reasoning_effort": "medium",
1538      "supports_attachments": true
1539    },
1540    {
1541      "id": "moonshotai/kimi-k2.6",
1542      "name": "MoonshotAI: Kimi K2.6",
1543      "cost_per_1m_in": 0.95,
1544      "cost_per_1m_out": 4,
1545      "cost_per_1m_in_cached": 0,
1546      "cost_per_1m_out_cached": 0.16,
1547      "context_window": 262144,
1548      "default_max_tokens": 26214,
1549      "can_reason": true,
1550      "reasoning_levels": [
1551        "low",
1552        "medium",
1553        "high"
1554      ],
1555      "default_reasoning_effort": "medium",
1556      "supports_attachments": true
1557    },
1558    {
1559      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1560      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1561      "cost_per_1m_in": 1.2,
1562      "cost_per_1m_out": 1.2,
1563      "cost_per_1m_in_cached": 0,
1564      "cost_per_1m_out_cached": 0,
1565      "context_window": 131072,
1566      "default_max_tokens": 8192,
1567      "can_reason": false,
1568      "supports_attachments": false
1569    },
1570    {
1571      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1572      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1573      "cost_per_1m_in": 0.1,
1574      "cost_per_1m_out": 0.4,
1575      "cost_per_1m_in_cached": 0,
1576      "cost_per_1m_out_cached": 0,
1577      "context_window": 131072,
1578      "default_max_tokens": 8192,
1579      "can_reason": true,
1580      "reasoning_levels": [
1581        "low",
1582        "medium",
1583        "high"
1584      ],
1585      "default_reasoning_effort": "medium",
1586      "supports_attachments": false
1587    },
1588    {
1589      "id": "nvidia/nemotron-3-nano-30b-a3b",
1590      "name": "NVIDIA: Nemotron 3 Nano 30B A3B",
1591      "cost_per_1m_in": 0.05,
1592      "cost_per_1m_out": 0.2,
1593      "cost_per_1m_in_cached": 0,
1594      "cost_per_1m_out_cached": 0,
1595      "context_window": 262144,
1596      "default_max_tokens": 114000,
1597      "can_reason": true,
1598      "reasoning_levels": [
1599        "low",
1600        "medium",
1601        "high"
1602      ],
1603      "default_reasoning_effort": "medium",
1604      "supports_attachments": false
1605    },
1606    {
1607      "id": "nvidia/nemotron-3-nano-30b-a3b:free",
1608      "name": "NVIDIA: Nemotron 3 Nano 30B A3B (free)",
1609      "cost_per_1m_in": 0,
1610      "cost_per_1m_out": 0,
1611      "cost_per_1m_in_cached": 0,
1612      "cost_per_1m_out_cached": 0,
1613      "context_window": 256000,
1614      "default_max_tokens": 25600,
1615      "can_reason": true,
1616      "reasoning_levels": [
1617        "low",
1618        "medium",
1619        "high"
1620      ],
1621      "default_reasoning_effort": "medium",
1622      "supports_attachments": false
1623    },
1624    {
1625      "id": "nvidia/nemotron-3-nano-omni-30b-a3b-reasoning:free",
1626      "name": "NVIDIA: Nemotron 3 Nano Omni (free)",
1627      "cost_per_1m_in": 0,
1628      "cost_per_1m_out": 0,
1629      "cost_per_1m_in_cached": 0,
1630      "cost_per_1m_out_cached": 0,
1631      "context_window": 256000,
1632      "default_max_tokens": 32768,
1633      "can_reason": true,
1634      "reasoning_levels": [
1635        "low",
1636        "medium",
1637        "high"
1638      ],
1639      "default_reasoning_effort": "medium",
1640      "supports_attachments": true
1641    },
1642    {
1643      "id": "nvidia/nemotron-3-super-120b-a12b",
1644      "name": "NVIDIA: Nemotron 3 Super",
1645      "cost_per_1m_in": 0.1,
1646      "cost_per_1m_out": 0.5,
1647      "cost_per_1m_in_cached": 0,
1648      "cost_per_1m_out_cached": 0,
1649      "context_window": 262144,
1650      "default_max_tokens": 8192,
1651      "can_reason": true,
1652      "reasoning_levels": [
1653        "low",
1654        "medium",
1655        "high"
1656      ],
1657      "default_reasoning_effort": "medium",
1658      "supports_attachments": false
1659    },
1660    {
1661      "id": "nvidia/nemotron-3-super-120b-a12b:free",
1662      "name": "NVIDIA: Nemotron 3 Super (free)",
1663      "cost_per_1m_in": 0,
1664      "cost_per_1m_out": 0,
1665      "cost_per_1m_in_cached": 0,
1666      "cost_per_1m_out_cached": 0,
1667      "context_window": 262144,
1668      "default_max_tokens": 131072,
1669      "can_reason": true,
1670      "reasoning_levels": [
1671        "low",
1672        "medium",
1673        "high"
1674      ],
1675      "default_reasoning_effort": "medium",
1676      "supports_attachments": false
1677    },
1678    {
1679      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1680      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1681      "cost_per_1m_in": 0,
1682      "cost_per_1m_out": 0,
1683      "cost_per_1m_in_cached": 0,
1684      "cost_per_1m_out_cached": 0,
1685      "context_window": 128000,
1686      "default_max_tokens": 64000,
1687      "can_reason": true,
1688      "reasoning_levels": [
1689        "low",
1690        "medium",
1691        "high"
1692      ],
1693      "default_reasoning_effort": "medium",
1694      "supports_attachments": true
1695    },
1696    {
1697      "id": "nvidia/nemotron-nano-9b-v2",
1698      "name": "NVIDIA: Nemotron Nano 9B V2",
1699      "cost_per_1m_in": 0.04,
1700      "cost_per_1m_out": 0.16,
1701      "cost_per_1m_in_cached": 0,
1702      "cost_per_1m_out_cached": 0,
1703      "context_window": 131072,
1704      "default_max_tokens": 8192,
1705      "can_reason": true,
1706      "reasoning_levels": [
1707        "low",
1708        "medium",
1709        "high"
1710      ],
1711      "default_reasoning_effort": "medium",
1712      "supports_attachments": false
1713    },
1714    {
1715      "id": "nvidia/nemotron-nano-9b-v2:free",
1716      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1717      "cost_per_1m_in": 0,
1718      "cost_per_1m_out": 0,
1719      "cost_per_1m_in_cached": 0,
1720      "cost_per_1m_out_cached": 0,
1721      "context_window": 128000,
1722      "default_max_tokens": 12800,
1723      "can_reason": true,
1724      "reasoning_levels": [
1725        "low",
1726        "medium",
1727        "high"
1728      ],
1729      "default_reasoning_effort": "medium",
1730      "supports_attachments": false
1731    },
1732    {
1733      "id": "nex-agi/deepseek-v3.1-nex-n1",
1734      "name": "Nex AGI: DeepSeek V3.1 Nex N1",
1735      "cost_per_1m_in": 0.135,
1736      "cost_per_1m_out": 0.5,
1737      "cost_per_1m_in_cached": 0,
1738      "cost_per_1m_out_cached": 0,
1739      "context_window": 131072,
1740      "default_max_tokens": 81920,
1741      "can_reason": false,
1742      "supports_attachments": false
1743    },
1744    {
1745      "id": "openai/gpt-audio",
1746      "name": "OpenAI: GPT Audio",
1747      "cost_per_1m_in": 2.5,
1748      "cost_per_1m_out": 10,
1749      "cost_per_1m_in_cached": 0,
1750      "cost_per_1m_out_cached": 0,
1751      "context_window": 128000,
1752      "default_max_tokens": 8192,
1753      "can_reason": false,
1754      "supports_attachments": false
1755    },
1756    {
1757      "id": "openai/gpt-audio-mini",
1758      "name": "OpenAI: GPT Audio Mini",
1759      "cost_per_1m_in": 0.6,
1760      "cost_per_1m_out": 2.4,
1761      "cost_per_1m_in_cached": 0,
1762      "cost_per_1m_out_cached": 0,
1763      "context_window": 128000,
1764      "default_max_tokens": 8192,
1765      "can_reason": false,
1766      "supports_attachments": false
1767    },
1768    {
1769      "id": "openai/gpt-chat-latest",
1770      "name": "OpenAI: GPT Chat Latest",
1771      "cost_per_1m_in": 5,
1772      "cost_per_1m_out": 30,
1773      "cost_per_1m_in_cached": 0,
1774      "cost_per_1m_out_cached": 0.5,
1775      "context_window": 400000,
1776      "default_max_tokens": 64000,
1777      "can_reason": false,
1778      "supports_attachments": true
1779    },
1780    {
1781      "id": "openai/gpt-4-turbo",
1782      "name": "OpenAI: GPT-4 Turbo",
1783      "cost_per_1m_in": 10,
1784      "cost_per_1m_out": 30,
1785      "cost_per_1m_in_cached": 0,
1786      "cost_per_1m_out_cached": 0,
1787      "context_window": 128000,
1788      "default_max_tokens": 2048,
1789      "can_reason": false,
1790      "supports_attachments": true
1791    },
1792    {
1793      "id": "openai/gpt-4-1106-preview",
1794      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1795      "cost_per_1m_in": 10,
1796      "cost_per_1m_out": 30,
1797      "cost_per_1m_in_cached": 0,
1798      "cost_per_1m_out_cached": 0,
1799      "context_window": 128000,
1800      "default_max_tokens": 2048,
1801      "can_reason": false,
1802      "supports_attachments": false
1803    },
1804    {
1805      "id": "openai/gpt-4-turbo-preview",
1806      "name": "OpenAI: GPT-4 Turbo Preview",
1807      "cost_per_1m_in": 10,
1808      "cost_per_1m_out": 30,
1809      "cost_per_1m_in_cached": 0,
1810      "cost_per_1m_out_cached": 0,
1811      "context_window": 128000,
1812      "default_max_tokens": 2048,
1813      "can_reason": false,
1814      "supports_attachments": false
1815    },
1816    {
1817      "id": "openai/gpt-4.1",
1818      "name": "OpenAI: GPT-4.1",
1819      "cost_per_1m_in": 2,
1820      "cost_per_1m_out": 8,
1821      "cost_per_1m_in_cached": 0,
1822      "cost_per_1m_out_cached": 0.5,
1823      "context_window": 1047576,
1824      "default_max_tokens": 16384,
1825      "can_reason": false,
1826      "supports_attachments": true
1827    },
1828    {
1829      "id": "openai/gpt-4.1-mini",
1830      "name": "OpenAI: GPT-4.1 Mini",
1831      "cost_per_1m_in": 0.4,
1832      "cost_per_1m_out": 1.6,
1833      "cost_per_1m_in_cached": 0,
1834      "cost_per_1m_out_cached": 0.1,
1835      "context_window": 1047576,
1836      "default_max_tokens": 104757,
1837      "can_reason": false,
1838      "supports_attachments": true
1839    },
1840    {
1841      "id": "openai/gpt-4.1-nano",
1842      "name": "OpenAI: GPT-4.1 Nano",
1843      "cost_per_1m_in": 0.1,
1844      "cost_per_1m_out": 0.4,
1845      "cost_per_1m_in_cached": 0,
1846      "cost_per_1m_out_cached": 0.025,
1847      "context_window": 1047576,
1848      "default_max_tokens": 16384,
1849      "can_reason": false,
1850      "supports_attachments": true
1851    },
1852    {
1853      "id": "openai/gpt-4o",
1854      "name": "OpenAI: GPT-4o",
1855      "cost_per_1m_in": 2.5,
1856      "cost_per_1m_out": 10,
1857      "cost_per_1m_in_cached": 0,
1858      "cost_per_1m_out_cached": 0,
1859      "context_window": 128000,
1860      "default_max_tokens": 8192,
1861      "can_reason": false,
1862      "supports_attachments": true
1863    },
1864    {
1865      "id": "openai/gpt-4o-2024-05-13",
1866      "name": "OpenAI: GPT-4o (2024-05-13)",
1867      "cost_per_1m_in": 5,
1868      "cost_per_1m_out": 15,
1869      "cost_per_1m_in_cached": 0,
1870      "cost_per_1m_out_cached": 0,
1871      "context_window": 128000,
1872      "default_max_tokens": 2048,
1873      "can_reason": false,
1874      "supports_attachments": true
1875    },
1876    {
1877      "id": "openai/gpt-4o-2024-08-06",
1878      "name": "OpenAI: GPT-4o (2024-08-06)",
1879      "cost_per_1m_in": 2.5,
1880      "cost_per_1m_out": 10,
1881      "cost_per_1m_in_cached": 0,
1882      "cost_per_1m_out_cached": 1.25,
1883      "context_window": 128000,
1884      "default_max_tokens": 8192,
1885      "can_reason": false,
1886      "supports_attachments": true
1887    },
1888    {
1889      "id": "openai/gpt-4o-2024-11-20",
1890      "name": "OpenAI: GPT-4o (2024-11-20)",
1891      "cost_per_1m_in": 2.5,
1892      "cost_per_1m_out": 10,
1893      "cost_per_1m_in_cached": 0,
1894      "cost_per_1m_out_cached": 1.25,
1895      "context_window": 128000,
1896      "default_max_tokens": 8192,
1897      "can_reason": false,
1898      "supports_attachments": true
1899    },
1900    {
1901      "id": "openai/gpt-4o-audio-preview",
1902      "name": "OpenAI: GPT-4o Audio",
1903      "cost_per_1m_in": 2.5,
1904      "cost_per_1m_out": 10,
1905      "cost_per_1m_in_cached": 0,
1906      "cost_per_1m_out_cached": 0,
1907      "context_window": 128000,
1908      "default_max_tokens": 8192,
1909      "can_reason": false,
1910      "supports_attachments": false
1911    },
1912    {
1913      "id": "openai/gpt-4o-mini",
1914      "name": "OpenAI: GPT-4o-mini",
1915      "cost_per_1m_in": 0.15,
1916      "cost_per_1m_out": 0.6,
1917      "cost_per_1m_in_cached": 0,
1918      "cost_per_1m_out_cached": 0.075,
1919      "context_window": 128000,
1920      "default_max_tokens": 8192,
1921      "can_reason": false,
1922      "supports_attachments": true
1923    },
1924    {
1925      "id": "openai/gpt-4o-mini-2024-07-18",
1926      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1927      "cost_per_1m_in": 0.15,
1928      "cost_per_1m_out": 0.6,
1929      "cost_per_1m_in_cached": 0,
1930      "cost_per_1m_out_cached": 0.075,
1931      "context_window": 128000,
1932      "default_max_tokens": 8192,
1933      "can_reason": false,
1934      "supports_attachments": true
1935    },
1936    {
1937      "id": "openai/gpt-5",
1938      "name": "OpenAI: GPT-5",
1939      "cost_per_1m_in": 1.25,
1940      "cost_per_1m_out": 10,
1941      "cost_per_1m_in_cached": 0,
1942      "cost_per_1m_out_cached": 0.125,
1943      "context_window": 400000,
1944      "default_max_tokens": 64000,
1945      "can_reason": true,
1946      "reasoning_levels": [
1947        "low",
1948        "medium",
1949        "high"
1950      ],
1951      "default_reasoning_effort": "medium",
1952      "supports_attachments": true
1953    },
1954    {
1955      "id": "openai/gpt-5-codex",
1956      "name": "OpenAI: GPT-5 Codex",
1957      "cost_per_1m_in": 1.25,
1958      "cost_per_1m_out": 10,
1959      "cost_per_1m_in_cached": 0,
1960      "cost_per_1m_out_cached": 0.125,
1961      "context_window": 400000,
1962      "default_max_tokens": 64000,
1963      "can_reason": true,
1964      "reasoning_levels": [
1965        "low",
1966        "medium",
1967        "high"
1968      ],
1969      "default_reasoning_effort": "medium",
1970      "supports_attachments": true
1971    },
1972    {
1973      "id": "openai/gpt-5-mini",
1974      "name": "OpenAI: GPT-5 Mini",
1975      "cost_per_1m_in": 0.25,
1976      "cost_per_1m_out": 2,
1977      "cost_per_1m_in_cached": 0,
1978      "cost_per_1m_out_cached": 0.03,
1979      "context_window": 400000,
1980      "default_max_tokens": 40000,
1981      "can_reason": true,
1982      "reasoning_levels": [
1983        "low",
1984        "medium",
1985        "high"
1986      ],
1987      "default_reasoning_effort": "medium",
1988      "supports_attachments": true
1989    },
1990    {
1991      "id": "openai/gpt-5-nano",
1992      "name": "OpenAI: GPT-5 Nano",
1993      "cost_per_1m_in": 0.05,
1994      "cost_per_1m_out": 0.4,
1995      "cost_per_1m_in_cached": 0,
1996      "cost_per_1m_out_cached": 0.01,
1997      "context_window": 400000,
1998      "default_max_tokens": 40000,
1999      "can_reason": true,
2000      "reasoning_levels": [
2001        "low",
2002        "medium",
2003        "high"
2004      ],
2005      "default_reasoning_effort": "medium",
2006      "supports_attachments": true
2007    },
2008    {
2009      "id": "openai/gpt-5-pro",
2010      "name": "OpenAI: GPT-5 Pro",
2011      "cost_per_1m_in": 15,
2012      "cost_per_1m_out": 120,
2013      "cost_per_1m_in_cached": 0,
2014      "cost_per_1m_out_cached": 0,
2015      "context_window": 400000,
2016      "default_max_tokens": 64000,
2017      "can_reason": true,
2018      "reasoning_levels": [
2019        "low",
2020        "medium",
2021        "high"
2022      ],
2023      "default_reasoning_effort": "medium",
2024      "supports_attachments": true
2025    },
2026    {
2027      "id": "openai/gpt-5.1",
2028      "name": "OpenAI: GPT-5.1",
2029      "cost_per_1m_in": 1.25,
2030      "cost_per_1m_out": 10,
2031      "cost_per_1m_in_cached": 0,
2032      "cost_per_1m_out_cached": 0.13,
2033      "context_window": 400000,
2034      "default_max_tokens": 64000,
2035      "can_reason": true,
2036      "reasoning_levels": [
2037        "low",
2038        "medium",
2039        "high"
2040      ],
2041      "default_reasoning_effort": "medium",
2042      "supports_attachments": true
2043    },
2044    {
2045      "id": "openai/gpt-5.1-chat",
2046      "name": "OpenAI: GPT-5.1 Chat",
2047      "cost_per_1m_in": 1.25,
2048      "cost_per_1m_out": 10,
2049      "cost_per_1m_in_cached": 0,
2050      "cost_per_1m_out_cached": 0.125,
2051      "context_window": 128000,
2052      "default_max_tokens": 8192,
2053      "can_reason": false,
2054      "supports_attachments": true
2055    },
2056    {
2057      "id": "openai/gpt-5.1-codex",
2058      "name": "OpenAI: GPT-5.1-Codex",
2059      "cost_per_1m_in": 1.25,
2060      "cost_per_1m_out": 10,
2061      "cost_per_1m_in_cached": 0,
2062      "cost_per_1m_out_cached": 0.125,
2063      "context_window": 400000,
2064      "default_max_tokens": 64000,
2065      "can_reason": true,
2066      "reasoning_levels": [
2067        "low",
2068        "medium",
2069        "high"
2070      ],
2071      "default_reasoning_effort": "medium",
2072      "supports_attachments": true
2073    },
2074    {
2075      "id": "openai/gpt-5.1-codex-max",
2076      "name": "OpenAI: GPT-5.1-Codex-Max",
2077      "cost_per_1m_in": 1.25,
2078      "cost_per_1m_out": 10,
2079      "cost_per_1m_in_cached": 0,
2080      "cost_per_1m_out_cached": 0.125,
2081      "context_window": 400000,
2082      "default_max_tokens": 64000,
2083      "can_reason": true,
2084      "reasoning_levels": [
2085        "low",
2086        "medium",
2087        "high"
2088      ],
2089      "default_reasoning_effort": "medium",
2090      "supports_attachments": true
2091    },
2092    {
2093      "id": "openai/gpt-5.1-codex-mini",
2094      "name": "OpenAI: GPT-5.1-Codex-Mini",
2095      "cost_per_1m_in": 0.25,
2096      "cost_per_1m_out": 2,
2097      "cost_per_1m_in_cached": 0,
2098      "cost_per_1m_out_cached": 0.025,
2099      "context_window": 400000,
2100      "default_max_tokens": 50000,
2101      "can_reason": true,
2102      "reasoning_levels": [
2103        "low",
2104        "medium",
2105        "high"
2106      ],
2107      "default_reasoning_effort": "medium",
2108      "supports_attachments": true
2109    },
2110    {
2111      "id": "openai/gpt-5.2",
2112      "name": "OpenAI: GPT-5.2",
2113      "cost_per_1m_in": 1.75,
2114      "cost_per_1m_out": 14,
2115      "cost_per_1m_in_cached": 0,
2116      "cost_per_1m_out_cached": 0.175,
2117      "context_window": 400000,
2118      "default_max_tokens": 64000,
2119      "can_reason": true,
2120      "reasoning_levels": [
2121        "low",
2122        "medium",
2123        "high"
2124      ],
2125      "default_reasoning_effort": "medium",
2126      "supports_attachments": true
2127    },
2128    {
2129      "id": "openai/gpt-5.2-chat",
2130      "name": "OpenAI: GPT-5.2 Chat",
2131      "cost_per_1m_in": 1.75,
2132      "cost_per_1m_out": 14,
2133      "cost_per_1m_in_cached": 0,
2134      "cost_per_1m_out_cached": 0.175,
2135      "context_window": 128000,
2136      "default_max_tokens": 16000,
2137      "can_reason": false,
2138      "supports_attachments": true
2139    },
2140    {
2141      "id": "openai/gpt-5.2-pro",
2142      "name": "OpenAI: GPT-5.2 Pro",
2143      "cost_per_1m_in": 21,
2144      "cost_per_1m_out": 168,
2145      "cost_per_1m_in_cached": 0,
2146      "cost_per_1m_out_cached": 0,
2147      "context_window": 400000,
2148      "default_max_tokens": 64000,
2149      "can_reason": true,
2150      "reasoning_levels": [
2151        "low",
2152        "medium",
2153        "high"
2154      ],
2155      "default_reasoning_effort": "medium",
2156      "supports_attachments": true
2157    },
2158    {
2159      "id": "openai/gpt-5.2-codex",
2160      "name": "OpenAI: GPT-5.2-Codex",
2161      "cost_per_1m_in": 1.75,
2162      "cost_per_1m_out": 14,
2163      "cost_per_1m_in_cached": 0,
2164      "cost_per_1m_out_cached": 0.175,
2165      "context_window": 400000,
2166      "default_max_tokens": 64000,
2167      "can_reason": true,
2168      "reasoning_levels": [
2169        "low",
2170        "medium",
2171        "high"
2172      ],
2173      "default_reasoning_effort": "medium",
2174      "supports_attachments": true
2175    },
2176    {
2177      "id": "openai/gpt-5.3-chat",
2178      "name": "OpenAI: GPT-5.3 Chat",
2179      "cost_per_1m_in": 1.75,
2180      "cost_per_1m_out": 14,
2181      "cost_per_1m_in_cached": 0,
2182      "cost_per_1m_out_cached": 0.175,
2183      "context_window": 128000,
2184      "default_max_tokens": 8192,
2185      "can_reason": false,
2186      "supports_attachments": true
2187    },
2188    {
2189      "id": "openai/gpt-5.3-codex",
2190      "name": "OpenAI: GPT-5.3-Codex",
2191      "cost_per_1m_in": 1.75,
2192      "cost_per_1m_out": 14,
2193      "cost_per_1m_in_cached": 0,
2194      "cost_per_1m_out_cached": 0.175,
2195      "context_window": 400000,
2196      "default_max_tokens": 64000,
2197      "can_reason": true,
2198      "reasoning_levels": [
2199        "low",
2200        "medium",
2201        "high"
2202      ],
2203      "default_reasoning_effort": "medium",
2204      "supports_attachments": true
2205    },
2206    {
2207      "id": "openai/gpt-5.4",
2208      "name": "OpenAI: GPT-5.4",
2209      "cost_per_1m_in": 2.5,
2210      "cost_per_1m_out": 15,
2211      "cost_per_1m_in_cached": 0,
2212      "cost_per_1m_out_cached": 0.25,
2213      "context_window": 1050000,
2214      "default_max_tokens": 64000,
2215      "can_reason": true,
2216      "reasoning_levels": [
2217        "low",
2218        "medium",
2219        "high"
2220      ],
2221      "default_reasoning_effort": "medium",
2222      "supports_attachments": true
2223    },
2224    {
2225      "id": "openai/gpt-5.4-mini",
2226      "name": "OpenAI: GPT-5.4 Mini",
2227      "cost_per_1m_in": 0.75,
2228      "cost_per_1m_out": 4.5,
2229      "cost_per_1m_in_cached": 0,
2230      "cost_per_1m_out_cached": 0.075,
2231      "context_window": 400000,
2232      "default_max_tokens": 64000,
2233      "can_reason": true,
2234      "reasoning_levels": [
2235        "low",
2236        "medium",
2237        "high"
2238      ],
2239      "default_reasoning_effort": "medium",
2240      "supports_attachments": true
2241    },
2242    {
2243      "id": "openai/gpt-5.4-nano",
2244      "name": "OpenAI: GPT-5.4 Nano",
2245      "cost_per_1m_in": 0.2,
2246      "cost_per_1m_out": 1.25,
2247      "cost_per_1m_in_cached": 0,
2248      "cost_per_1m_out_cached": 0.02,
2249      "context_window": 400000,
2250      "default_max_tokens": 64000,
2251      "can_reason": true,
2252      "reasoning_levels": [
2253        "low",
2254        "medium",
2255        "high"
2256      ],
2257      "default_reasoning_effort": "medium",
2258      "supports_attachments": true
2259    },
2260    {
2261      "id": "openai/gpt-5.4-pro",
2262      "name": "OpenAI: GPT-5.4 Pro",
2263      "cost_per_1m_in": 30,
2264      "cost_per_1m_out": 180,
2265      "cost_per_1m_in_cached": 0,
2266      "cost_per_1m_out_cached": 0,
2267      "context_window": 1050000,
2268      "default_max_tokens": 64000,
2269      "can_reason": true,
2270      "reasoning_levels": [
2271        "low",
2272        "medium",
2273        "high"
2274      ],
2275      "default_reasoning_effort": "medium",
2276      "supports_attachments": true
2277    },
2278    {
2279      "id": "openai/gpt-5.5",
2280      "name": "OpenAI: GPT-5.5",
2281      "cost_per_1m_in": 5,
2282      "cost_per_1m_out": 30,
2283      "cost_per_1m_in_cached": 0,
2284      "cost_per_1m_out_cached": 0.5,
2285      "context_window": 1050000,
2286      "default_max_tokens": 64000,
2287      "can_reason": true,
2288      "reasoning_levels": [
2289        "low",
2290        "medium",
2291        "high"
2292      ],
2293      "default_reasoning_effort": "medium",
2294      "supports_attachments": true
2295    },
2296    {
2297      "id": "openai/gpt-5.5-pro",
2298      "name": "OpenAI: GPT-5.5 Pro",
2299      "cost_per_1m_in": 30,
2300      "cost_per_1m_out": 180,
2301      "cost_per_1m_in_cached": 0,
2302      "cost_per_1m_out_cached": 0,
2303      "context_window": 1050000,
2304      "default_max_tokens": 64000,
2305      "can_reason": true,
2306      "reasoning_levels": [
2307        "low",
2308        "medium",
2309        "high"
2310      ],
2311      "default_reasoning_effort": "medium",
2312      "supports_attachments": true
2313    },
2314    {
2315      "id": "openai/gpt-oss-120b",
2316      "name": "OpenAI: gpt-oss-120b",
2317      "cost_per_1m_in": 0.05,
2318      "cost_per_1m_out": 0.25,
2319      "cost_per_1m_in_cached": 0,
2320      "cost_per_1m_out_cached": 0,
2321      "context_window": 131072,
2322      "default_max_tokens": 16384,
2323      "can_reason": true,
2324      "reasoning_levels": [
2325        "low",
2326        "medium",
2327        "high"
2328      ],
2329      "default_reasoning_effort": "medium",
2330      "supports_attachments": false
2331    },
2332    {
2333      "id": "openai/gpt-oss-120b:free",
2334      "name": "OpenAI: gpt-oss-120b (free)",
2335      "cost_per_1m_in": 0,
2336      "cost_per_1m_out": 0,
2337      "cost_per_1m_in_cached": 0,
2338      "cost_per_1m_out_cached": 0,
2339      "context_window": 131072,
2340      "default_max_tokens": 65536,
2341      "can_reason": true,
2342      "reasoning_levels": [
2343        "low",
2344        "medium",
2345        "high"
2346      ],
2347      "default_reasoning_effort": "medium",
2348      "supports_attachments": false
2349    },
2350    {
2351      "id": "openai/gpt-oss-20b",
2352      "name": "OpenAI: gpt-oss-20b",
2353      "cost_per_1m_in": 0.03,
2354      "cost_per_1m_out": 0.14,
2355      "cost_per_1m_in_cached": 0,
2356      "cost_per_1m_out_cached": 0,
2357      "context_window": 131072,
2358      "default_max_tokens": 65536,
2359      "can_reason": true,
2360      "reasoning_levels": [
2361        "low",
2362        "medium",
2363        "high"
2364      ],
2365      "default_reasoning_effort": "medium",
2366      "supports_attachments": false
2367    },
2368    {
2369      "id": "openai/gpt-oss-20b:free",
2370      "name": "OpenAI: gpt-oss-20b (free)",
2371      "cost_per_1m_in": 0,
2372      "cost_per_1m_out": 0,
2373      "cost_per_1m_in_cached": 0,
2374      "cost_per_1m_out_cached": 0,
2375      "context_window": 131072,
2376      "default_max_tokens": 4096,
2377      "can_reason": true,
2378      "reasoning_levels": [
2379        "low",
2380        "medium",
2381        "high"
2382      ],
2383      "default_reasoning_effort": "medium",
2384      "supports_attachments": false
2385    },
2386    {
2387      "id": "openai/gpt-oss-safeguard-20b",
2388      "name": "OpenAI: gpt-oss-safeguard-20b",
2389      "cost_per_1m_in": 0.075,
2390      "cost_per_1m_out": 0.3,
2391      "cost_per_1m_in_cached": 0,
2392      "cost_per_1m_out_cached": 0.037,
2393      "context_window": 131072,
2394      "default_max_tokens": 32768,
2395      "can_reason": true,
2396      "reasoning_levels": [
2397        "low",
2398        "medium",
2399        "high"
2400      ],
2401      "default_reasoning_effort": "medium",
2402      "supports_attachments": false
2403    },
2404    {
2405      "id": "openai/o1",
2406      "name": "OpenAI: o1",
2407      "cost_per_1m_in": 15,
2408      "cost_per_1m_out": 60,
2409      "cost_per_1m_in_cached": 0,
2410      "cost_per_1m_out_cached": 7.5,
2411      "context_window": 200000,
2412      "default_max_tokens": 50000,
2413      "can_reason": true,
2414      "reasoning_levels": [
2415        "low",
2416        "medium",
2417        "high"
2418      ],
2419      "default_reasoning_effort": "medium",
2420      "supports_attachments": true
2421    },
2422    {
2423      "id": "openai/o3",
2424      "name": "OpenAI: o3",
2425      "cost_per_1m_in": 2,
2426      "cost_per_1m_out": 8,
2427      "cost_per_1m_in_cached": 0,
2428      "cost_per_1m_out_cached": 0.5,
2429      "context_window": 200000,
2430      "default_max_tokens": 50000,
2431      "can_reason": true,
2432      "reasoning_levels": [
2433        "low",
2434        "medium",
2435        "high"
2436      ],
2437      "default_reasoning_effort": "medium",
2438      "supports_attachments": true
2439    },
2440    {
2441      "id": "openai/o3-deep-research",
2442      "name": "OpenAI: o3 Deep Research",
2443      "cost_per_1m_in": 10,
2444      "cost_per_1m_out": 40,
2445      "cost_per_1m_in_cached": 0,
2446      "cost_per_1m_out_cached": 2.5,
2447      "context_window": 200000,
2448      "default_max_tokens": 50000,
2449      "can_reason": true,
2450      "reasoning_levels": [
2451        "low",
2452        "medium",
2453        "high"
2454      ],
2455      "default_reasoning_effort": "medium",
2456      "supports_attachments": true
2457    },
2458    {
2459      "id": "openai/o3-mini",
2460      "name": "OpenAI: o3 Mini",
2461      "cost_per_1m_in": 1.1,
2462      "cost_per_1m_out": 4.4,
2463      "cost_per_1m_in_cached": 0,
2464      "cost_per_1m_out_cached": 0.55,
2465      "context_window": 200000,
2466      "default_max_tokens": 50000,
2467      "can_reason": true,
2468      "reasoning_levels": [
2469        "low",
2470        "medium",
2471        "high"
2472      ],
2473      "default_reasoning_effort": "medium",
2474      "supports_attachments": false
2475    },
2476    {
2477      "id": "openai/o3-mini-high",
2478      "name": "OpenAI: o3 Mini High",
2479      "cost_per_1m_in": 1.1,
2480      "cost_per_1m_out": 4.4,
2481      "cost_per_1m_in_cached": 0,
2482      "cost_per_1m_out_cached": 0.55,
2483      "context_window": 200000,
2484      "default_max_tokens": 50000,
2485      "can_reason": true,
2486      "reasoning_levels": [
2487        "low",
2488        "medium",
2489        "high"
2490      ],
2491      "default_reasoning_effort": "medium",
2492      "supports_attachments": false
2493    },
2494    {
2495      "id": "openai/o3-pro",
2496      "name": "OpenAI: o3 Pro",
2497      "cost_per_1m_in": 20,
2498      "cost_per_1m_out": 80,
2499      "cost_per_1m_in_cached": 0,
2500      "cost_per_1m_out_cached": 0,
2501      "context_window": 200000,
2502      "default_max_tokens": 50000,
2503      "can_reason": true,
2504      "reasoning_levels": [
2505        "low",
2506        "medium",
2507        "high"
2508      ],
2509      "default_reasoning_effort": "medium",
2510      "supports_attachments": true
2511    },
2512    {
2513      "id": "openai/o4-mini",
2514      "name": "OpenAI: o4 Mini",
2515      "cost_per_1m_in": 1.1,
2516      "cost_per_1m_out": 4.4,
2517      "cost_per_1m_in_cached": 0,
2518      "cost_per_1m_out_cached": 0.275,
2519      "context_window": 200000,
2520      "default_max_tokens": 50000,
2521      "can_reason": true,
2522      "reasoning_levels": [
2523        "low",
2524        "medium",
2525        "high"
2526      ],
2527      "default_reasoning_effort": "medium",
2528      "supports_attachments": true
2529    },
2530    {
2531      "id": "openai/o4-mini-deep-research",
2532      "name": "OpenAI: o4 Mini Deep Research",
2533      "cost_per_1m_in": 2,
2534      "cost_per_1m_out": 8,
2535      "cost_per_1m_in_cached": 0,
2536      "cost_per_1m_out_cached": 0.5,
2537      "context_window": 200000,
2538      "default_max_tokens": 50000,
2539      "can_reason": true,
2540      "reasoning_levels": [
2541        "low",
2542        "medium",
2543        "high"
2544      ],
2545      "default_reasoning_effort": "medium",
2546      "supports_attachments": true
2547    },
2548    {
2549      "id": "openai/o4-mini-high",
2550      "name": "OpenAI: o4 Mini High",
2551      "cost_per_1m_in": 1.1,
2552      "cost_per_1m_out": 4.4,
2553      "cost_per_1m_in_cached": 0,
2554      "cost_per_1m_out_cached": 0.275,
2555      "context_window": 200000,
2556      "default_max_tokens": 50000,
2557      "can_reason": true,
2558      "reasoning_levels": [
2559        "low",
2560        "medium",
2561        "high"
2562      ],
2563      "default_reasoning_effort": "medium",
2564      "supports_attachments": true
2565    },
2566    {
2567      "id": "openrouter/owl-alpha",
2568      "name": "Owl Alpha",
2569      "cost_per_1m_in": 0,
2570      "cost_per_1m_out": 0,
2571      "cost_per_1m_in_cached": 0,
2572      "cost_per_1m_out_cached": 0,
2573      "context_window": 1048576,
2574      "default_max_tokens": 131072,
2575      "can_reason": false,
2576      "supports_attachments": false
2577    },
2578    {
2579      "id": "poolside/laguna-m.1:free",
2580      "name": "Poolside: Laguna M.1 (free)",
2581      "cost_per_1m_in": 0,
2582      "cost_per_1m_out": 0,
2583      "cost_per_1m_in_cached": 0,
2584      "cost_per_1m_out_cached": 0,
2585      "context_window": 131072,
2586      "default_max_tokens": 4096,
2587      "can_reason": true,
2588      "reasoning_levels": [
2589        "low",
2590        "medium",
2591        "high"
2592      ],
2593      "default_reasoning_effort": "medium",
2594      "supports_attachments": false
2595    },
2596    {
2597      "id": "poolside/laguna-xs.2:free",
2598      "name": "Poolside: Laguna XS.2 (free)",
2599      "cost_per_1m_in": 0,
2600      "cost_per_1m_out": 0,
2601      "cost_per_1m_in_cached": 0,
2602      "cost_per_1m_out_cached": 0,
2603      "context_window": 131072,
2604      "default_max_tokens": 4096,
2605      "can_reason": true,
2606      "reasoning_levels": [
2607        "low",
2608        "medium",
2609        "high"
2610      ],
2611      "default_reasoning_effort": "medium",
2612      "supports_attachments": false
2613    },
2614    {
2615      "id": "prime-intellect/intellect-3",
2616      "name": "Prime Intellect: INTELLECT-3",
2617      "cost_per_1m_in": 0.2,
2618      "cost_per_1m_out": 1.1,
2619      "cost_per_1m_in_cached": 0,
2620      "cost_per_1m_out_cached": 0,
2621      "context_window": 131072,
2622      "default_max_tokens": 65536,
2623      "can_reason": true,
2624      "reasoning_levels": [
2625        "low",
2626        "medium",
2627        "high"
2628      ],
2629      "default_reasoning_effort": "medium",
2630      "supports_attachments": false
2631    },
2632    {
2633      "id": "qwen/qwen-2.5-72b-instruct",
2634      "name": "Qwen2.5 72B Instruct",
2635      "cost_per_1m_in": 0.36,
2636      "cost_per_1m_out": 0.4,
2637      "cost_per_1m_in_cached": 0,
2638      "cost_per_1m_out_cached": 0,
2639      "context_window": 32768,
2640      "default_max_tokens": 8192,
2641      "can_reason": false,
2642      "supports_attachments": false
2643    },
2644    {
2645      "id": "qwen/qwen-plus-2025-07-28",
2646      "name": "Qwen: Qwen Plus 0728",
2647      "cost_per_1m_in": 0.26,
2648      "cost_per_1m_out": 0.78,
2649      "cost_per_1m_in_cached": 0.325,
2650      "cost_per_1m_out_cached": 0,
2651      "context_window": 1000000,
2652      "default_max_tokens": 16384,
2653      "can_reason": false,
2654      "supports_attachments": false
2655    },
2656    {
2657      "id": "qwen/qwen-plus-2025-07-28:thinking",
2658      "name": "Qwen: Qwen Plus 0728 (thinking)",
2659      "cost_per_1m_in": 0.26,
2660      "cost_per_1m_out": 0.78,
2661      "cost_per_1m_in_cached": 0.325,
2662      "cost_per_1m_out_cached": 0,
2663      "context_window": 1000000,
2664      "default_max_tokens": 16384,
2665      "can_reason": true,
2666      "reasoning_levels": [
2667        "low",
2668        "medium",
2669        "high"
2670      ],
2671      "default_reasoning_effort": "medium",
2672      "supports_attachments": false
2673    },
2674    {
2675      "id": "qwen/qwen-vl-max",
2676      "name": "Qwen: Qwen VL Max",
2677      "cost_per_1m_in": 0.52,
2678      "cost_per_1m_out": 2.08,
2679      "cost_per_1m_in_cached": 0,
2680      "cost_per_1m_out_cached": 0,
2681      "context_window": 131072,
2682      "default_max_tokens": 16384,
2683      "can_reason": false,
2684      "supports_attachments": true
2685    },
2686    {
2687      "id": "qwen/qwen-max",
2688      "name": "Qwen: Qwen-Max",
2689      "cost_per_1m_in": 1.04,
2690      "cost_per_1m_out": 4.16,
2691      "cost_per_1m_in_cached": 0,
2692      "cost_per_1m_out_cached": 0.208,
2693      "context_window": 32768,
2694      "default_max_tokens": 4096,
2695      "can_reason": false,
2696      "supports_attachments": false
2697    },
2698    {
2699      "id": "qwen/qwen-plus",
2700      "name": "Qwen: Qwen-Plus",
2701      "cost_per_1m_in": 0.26,
2702      "cost_per_1m_out": 0.78,
2703      "cost_per_1m_in_cached": 0.325,
2704      "cost_per_1m_out_cached": 0.052,
2705      "context_window": 1000000,
2706      "default_max_tokens": 16384,
2707      "can_reason": false,
2708      "supports_attachments": false
2709    },
2710    {
2711      "id": "qwen/qwen-turbo",
2712      "name": "Qwen: Qwen-Turbo",
2713      "cost_per_1m_in": 0.0325,
2714      "cost_per_1m_out": 0.13,
2715      "cost_per_1m_in_cached": 0,
2716      "cost_per_1m_out_cached": 0.0065,
2717      "context_window": 131072,
2718      "default_max_tokens": 4096,
2719      "can_reason": false,
2720      "supports_attachments": false
2721    },
2722    {
2723      "id": "qwen/qwen-2.5-7b-instruct",
2724      "name": "Qwen: Qwen2.5 7B Instruct",
2725      "cost_per_1m_in": 0.04,
2726      "cost_per_1m_out": 0.1,
2727      "cost_per_1m_in_cached": 0,
2728      "cost_per_1m_out_cached": 0.04,
2729      "context_window": 32768,
2730      "default_max_tokens": 4096,
2731      "can_reason": false,
2732      "supports_attachments": false
2733    },
2734    {
2735      "id": "qwen/qwen3-14b",
2736      "name": "Qwen: Qwen3 14B",
2737      "cost_per_1m_in": 0.12,
2738      "cost_per_1m_out": 0.24,
2739      "cost_per_1m_in_cached": 0,
2740      "cost_per_1m_out_cached": 0,
2741      "context_window": 40960,
2742      "default_max_tokens": 8192,
2743      "can_reason": true,
2744      "reasoning_levels": [
2745        "low",
2746        "medium",
2747        "high"
2748      ],
2749      "default_reasoning_effort": "medium",
2750      "supports_attachments": false
2751    },
2752    {
2753      "id": "qwen/qwen3-235b-a22b",
2754      "name": "Qwen: Qwen3 235B A22B",
2755      "cost_per_1m_in": 0.455,
2756      "cost_per_1m_out": 1.82,
2757      "cost_per_1m_in_cached": 0,
2758      "cost_per_1m_out_cached": 0,
2759      "context_window": 131072,
2760      "default_max_tokens": 4096,
2761      "can_reason": true,
2762      "reasoning_levels": [
2763        "low",
2764        "medium",
2765        "high"
2766      ],
2767      "default_reasoning_effort": "medium",
2768      "supports_attachments": false
2769    },
2770    {
2771      "id": "qwen/qwen3-235b-a22b-2507",
2772      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2773      "cost_per_1m_in": 0.22,
2774      "cost_per_1m_out": 0.88,
2775      "cost_per_1m_in_cached": 0,
2776      "cost_per_1m_out_cached": 0,
2777      "context_window": 262144,
2778      "default_max_tokens": 8192,
2779      "can_reason": false,
2780      "supports_attachments": false
2781    },
2782    {
2783      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2784      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2785      "cost_per_1m_in": 0.1495,
2786      "cost_per_1m_out": 1.495,
2787      "cost_per_1m_in_cached": 0,
2788      "cost_per_1m_out_cached": 0,
2789      "context_window": 131072,
2790      "default_max_tokens": 13107,
2791      "can_reason": true,
2792      "reasoning_levels": [
2793        "low",
2794        "medium",
2795        "high"
2796      ],
2797      "default_reasoning_effort": "medium",
2798      "supports_attachments": false
2799    },
2800    {
2801      "id": "qwen/qwen3-30b-a3b",
2802      "name": "Qwen: Qwen3 30B A3B",
2803      "cost_per_1m_in": 0.13,
2804      "cost_per_1m_out": 0.52,
2805      "cost_per_1m_in_cached": 0,
2806      "cost_per_1m_out_cached": 0,
2807      "context_window": 131072,
2808      "default_max_tokens": 4096,
2809      "can_reason": true,
2810      "reasoning_levels": [
2811        "low",
2812        "medium",
2813        "high"
2814      ],
2815      "default_reasoning_effort": "medium",
2816      "supports_attachments": false
2817    },
2818    {
2819      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2820      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2821      "cost_per_1m_in": 0.1,
2822      "cost_per_1m_out": 0.3,
2823      "cost_per_1m_in_cached": 0,
2824      "cost_per_1m_out_cached": 0,
2825      "context_window": 262144,
2826      "default_max_tokens": 26214,
2827      "can_reason": false,
2828      "supports_attachments": false
2829    },
2830    {
2831      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2832      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2833      "cost_per_1m_in": 0.08,
2834      "cost_per_1m_out": 0.4,
2835      "cost_per_1m_in_cached": 0,
2836      "cost_per_1m_out_cached": 0.08,
2837      "context_window": 131072,
2838      "default_max_tokens": 65536,
2839      "can_reason": true,
2840      "reasoning_levels": [
2841        "low",
2842        "medium",
2843        "high"
2844      ],
2845      "default_reasoning_effort": "medium",
2846      "supports_attachments": false
2847    },
2848    {
2849      "id": "qwen/qwen3-32b",
2850      "name": "Qwen: Qwen3 32B",
2851      "cost_per_1m_in": 0.104,
2852      "cost_per_1m_out": 0.416,
2853      "cost_per_1m_in_cached": 0,
2854      "cost_per_1m_out_cached": 0,
2855      "context_window": 131072,
2856      "default_max_tokens": 4096,
2857      "can_reason": true,
2858      "reasoning_levels": [
2859        "low",
2860        "medium",
2861        "high"
2862      ],
2863      "default_reasoning_effort": "medium",
2864      "supports_attachments": false
2865    },
2866    {
2867      "id": "qwen/qwen3-8b",
2868      "name": "Qwen: Qwen3 8B",
2869      "cost_per_1m_in": 0.117,
2870      "cost_per_1m_out": 0.455,
2871      "cost_per_1m_in_cached": 0,
2872      "cost_per_1m_out_cached": 0,
2873      "context_window": 131072,
2874      "default_max_tokens": 4096,
2875      "can_reason": true,
2876      "reasoning_levels": [
2877        "low",
2878        "medium",
2879        "high"
2880      ],
2881      "default_reasoning_effort": "medium",
2882      "supports_attachments": false
2883    },
2884    {
2885      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2886      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2887      "cost_per_1m_in": 0.07,
2888      "cost_per_1m_out": 0.27,
2889      "cost_per_1m_in_cached": 0,
2890      "cost_per_1m_out_cached": 0,
2891      "context_window": 160000,
2892      "default_max_tokens": 16384,
2893      "can_reason": false,
2894      "supports_attachments": false
2895    },
2896    {
2897      "id": "qwen/qwen3-coder",
2898      "name": "Qwen: Qwen3 Coder 480B A35B",
2899      "cost_per_1m_in": 0.22,
2900      "cost_per_1m_out": 1.8,
2901      "cost_per_1m_in_cached": 0,
2902      "cost_per_1m_out_cached": 0,
2903      "context_window": 262144,
2904      "default_max_tokens": 32768,
2905      "can_reason": false,
2906      "supports_attachments": false
2907    },
2908    {
2909      "id": "qwen/qwen3-coder:free",
2910      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2911      "cost_per_1m_in": 0,
2912      "cost_per_1m_out": 0,
2913      "cost_per_1m_in_cached": 0,
2914      "cost_per_1m_out_cached": 0,
2915      "context_window": 262000,
2916      "default_max_tokens": 131000,
2917      "can_reason": false,
2918      "supports_attachments": false
2919    },
2920    {
2921      "id": "qwen/qwen3-coder-flash",
2922      "name": "Qwen: Qwen3 Coder Flash",
2923      "cost_per_1m_in": 0.195,
2924      "cost_per_1m_out": 0.975,
2925      "cost_per_1m_in_cached": 0.24375,
2926      "cost_per_1m_out_cached": 0.039,
2927      "context_window": 1000000,
2928      "default_max_tokens": 32768,
2929      "can_reason": false,
2930      "supports_attachments": false
2931    },
2932    {
2933      "id": "qwen/qwen3-coder-next",
2934      "name": "Qwen: Qwen3 Coder Next",
2935      "cost_per_1m_in": 0.14,
2936      "cost_per_1m_out": 0.8,
2937      "cost_per_1m_in_cached": 0,
2938      "cost_per_1m_out_cached": 0.09,
2939      "context_window": 262144,
2940      "default_max_tokens": 131072,
2941      "can_reason": false,
2942      "supports_attachments": false
2943    },
2944    {
2945      "id": "qwen/qwen3-coder-plus",
2946      "name": "Qwen: Qwen3 Coder Plus",
2947      "cost_per_1m_in": 0.65,
2948      "cost_per_1m_out": 3.25,
2949      "cost_per_1m_in_cached": 0.8125,
2950      "cost_per_1m_out_cached": 0.13,
2951      "context_window": 1000000,
2952      "default_max_tokens": 32768,
2953      "can_reason": false,
2954      "supports_attachments": false
2955    },
2956    {
2957      "id": "qwen/qwen3-max",
2958      "name": "Qwen: Qwen3 Max",
2959      "cost_per_1m_in": 0.78,
2960      "cost_per_1m_out": 3.9,
2961      "cost_per_1m_in_cached": 0.975,
2962      "cost_per_1m_out_cached": 0.156,
2963      "context_window": 262144,
2964      "default_max_tokens": 16384,
2965      "can_reason": false,
2966      "supports_attachments": false
2967    },
2968    {
2969      "id": "qwen/qwen3-max-thinking",
2970      "name": "Qwen: Qwen3 Max Thinking",
2971      "cost_per_1m_in": 0.78,
2972      "cost_per_1m_out": 3.9,
2973      "cost_per_1m_in_cached": 0,
2974      "cost_per_1m_out_cached": 0,
2975      "context_window": 262144,
2976      "default_max_tokens": 16384,
2977      "can_reason": true,
2978      "reasoning_levels": [
2979        "low",
2980        "medium",
2981        "high"
2982      ],
2983      "default_reasoning_effort": "medium",
2984      "supports_attachments": false
2985    },
2986    {
2987      "id": "qwen/qwen3-next-80b-a3b-instruct",
2988      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2989      "cost_per_1m_in": 0.1,
2990      "cost_per_1m_out": 1.1,
2991      "cost_per_1m_in_cached": 0,
2992      "cost_per_1m_out_cached": 0.07,
2993      "context_window": 262144,
2994      "default_max_tokens": 131072,
2995      "can_reason": false,
2996      "supports_attachments": false
2997    },
2998    {
2999      "id": "qwen/qwen3-next-80b-a3b-instruct:free",
3000      "name": "Qwen: Qwen3 Next 80B A3B Instruct (free)",
3001      "cost_per_1m_in": 0,
3002      "cost_per_1m_out": 0,
3003      "cost_per_1m_in_cached": 0,
3004      "cost_per_1m_out_cached": 0,
3005      "context_window": 262144,
3006      "default_max_tokens": 26214,
3007      "can_reason": false,
3008      "supports_attachments": false
3009    },
3010    {
3011      "id": "qwen/qwen3-next-80b-a3b-thinking",
3012      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
3013      "cost_per_1m_in": 0.0975,
3014      "cost_per_1m_out": 0.78,
3015      "cost_per_1m_in_cached": 0,
3016      "cost_per_1m_out_cached": 0,
3017      "context_window": 131072,
3018      "default_max_tokens": 16384,
3019      "can_reason": true,
3020      "reasoning_levels": [
3021        "low",
3022        "medium",
3023        "high"
3024      ],
3025      "default_reasoning_effort": "medium",
3026      "supports_attachments": false
3027    },
3028    {
3029      "id": "qwen/qwen3-vl-235b-a22b-instruct",
3030      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
3031      "cost_per_1m_in": 0.2,
3032      "cost_per_1m_out": 0.88,
3033      "cost_per_1m_in_cached": 0,
3034      "cost_per_1m_out_cached": 0.11,
3035      "context_window": 262144,
3036      "default_max_tokens": 8192,
3037      "can_reason": false,
3038      "supports_attachments": true
3039    },
3040    {
3041      "id": "qwen/qwen3-vl-235b-a22b-thinking",
3042      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
3043      "cost_per_1m_in": 0.26,
3044      "cost_per_1m_out": 2.6,
3045      "cost_per_1m_in_cached": 0,
3046      "cost_per_1m_out_cached": 0,
3047      "context_window": 131072,
3048      "default_max_tokens": 16384,
3049      "can_reason": true,
3050      "reasoning_levels": [
3051        "low",
3052        "medium",
3053        "high"
3054      ],
3055      "default_reasoning_effort": "medium",
3056      "supports_attachments": true
3057    },
3058    {
3059      "id": "qwen/qwen3-vl-30b-a3b-instruct",
3060      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
3061      "cost_per_1m_in": 0.15,
3062      "cost_per_1m_out": 0.6,
3063      "cost_per_1m_in_cached": 0,
3064      "cost_per_1m_out_cached": 0,
3065      "context_window": 262144,
3066      "default_max_tokens": 8192,
3067      "can_reason": false,
3068      "supports_attachments": true
3069    },
3070    {
3071      "id": "qwen/qwen3-vl-30b-a3b-thinking",
3072      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
3073      "cost_per_1m_in": 0.13,
3074      "cost_per_1m_out": 1.56,
3075      "cost_per_1m_in_cached": 0,
3076      "cost_per_1m_out_cached": 0,
3077      "context_window": 131072,
3078      "default_max_tokens": 16384,
3079      "can_reason": true,
3080      "reasoning_levels": [
3081        "low",
3082        "medium",
3083        "high"
3084      ],
3085      "default_reasoning_effort": "medium",
3086      "supports_attachments": true
3087    },
3088    {
3089      "id": "qwen/qwen3-vl-32b-instruct",
3090      "name": "Qwen: Qwen3 VL 32B Instruct",
3091      "cost_per_1m_in": 0.104,
3092      "cost_per_1m_out": 0.416,
3093      "cost_per_1m_in_cached": 0,
3094      "cost_per_1m_out_cached": 0,
3095      "context_window": 131072,
3096      "default_max_tokens": 16384,
3097      "can_reason": false,
3098      "supports_attachments": true
3099    },
3100    {
3101      "id": "qwen/qwen3-vl-8b-instruct",
3102      "name": "Qwen: Qwen3 VL 8B Instruct",
3103      "cost_per_1m_in": 0.25,
3104      "cost_per_1m_out": 0.75,
3105      "cost_per_1m_in_cached": 0,
3106      "cost_per_1m_out_cached": 0.12,
3107      "context_window": 262144,
3108      "default_max_tokens": 131072,
3109      "can_reason": false,
3110      "supports_attachments": true
3111    },
3112    {
3113      "id": "qwen/qwen3-vl-8b-thinking",
3114      "name": "Qwen: Qwen3 VL 8B Thinking",
3115      "cost_per_1m_in": 0.117,
3116      "cost_per_1m_out": 1.365,
3117      "cost_per_1m_in_cached": 0,
3118      "cost_per_1m_out_cached": 0,
3119      "context_window": 131072,
3120      "default_max_tokens": 16384,
3121      "can_reason": true,
3122      "reasoning_levels": [
3123        "low",
3124        "medium",
3125        "high"
3126      ],
3127      "default_reasoning_effort": "medium",
3128      "supports_attachments": true
3129    },
3130    {
3131      "id": "qwen/qwen3.5-397b-a17b",
3132      "name": "Qwen: Qwen3.5 397B A17B",
3133      "cost_per_1m_in": 0.6,
3134      "cost_per_1m_out": 3.6,
3135      "cost_per_1m_in_cached": 0,
3136      "cost_per_1m_out_cached": 0,
3137      "context_window": 262144,
3138      "default_max_tokens": 32768,
3139      "can_reason": true,
3140      "reasoning_levels": [
3141        "low",
3142        "medium",
3143        "high"
3144      ],
3145      "default_reasoning_effort": "medium",
3146      "supports_attachments": true
3147    },
3148    {
3149      "id": "qwen/qwen3.5-plus-02-15",
3150      "name": "Qwen: Qwen3.5 Plus 2026-02-15",
3151      "cost_per_1m_in": 0.26,
3152      "cost_per_1m_out": 1.56,
3153      "cost_per_1m_in_cached": 0.325,
3154      "cost_per_1m_out_cached": 0,
3155      "context_window": 1000000,
3156      "default_max_tokens": 32768,
3157      "can_reason": true,
3158      "reasoning_levels": [
3159        "low",
3160        "medium",
3161        "high"
3162      ],
3163      "default_reasoning_effort": "medium",
3164      "supports_attachments": true
3165    },
3166    {
3167      "id": "qwen/qwen3.5-plus-20260420",
3168      "name": "Qwen: Qwen3.5 Plus 2026-04-20",
3169      "cost_per_1m_in": 0.4,
3170      "cost_per_1m_out": 2.4,
3171      "cost_per_1m_in_cached": 0,
3172      "cost_per_1m_out_cached": 0,
3173      "context_window": 1000000,
3174      "default_max_tokens": 32768,
3175      "can_reason": true,
3176      "reasoning_levels": [
3177        "low",
3178        "medium",
3179        "high"
3180      ],
3181      "default_reasoning_effort": "medium",
3182      "supports_attachments": true
3183    },
3184    {
3185      "id": "qwen/qwen3.5-122b-a10b",
3186      "name": "Qwen: Qwen3.5-122B-A10B",
3187      "cost_per_1m_in": 0.26,
3188      "cost_per_1m_out": 2.08,
3189      "cost_per_1m_in_cached": 0,
3190      "cost_per_1m_out_cached": 0,
3191      "context_window": 262144,
3192      "default_max_tokens": 32768,
3193      "can_reason": true,
3194      "reasoning_levels": [
3195        "low",
3196        "medium",
3197        "high"
3198      ],
3199      "default_reasoning_effort": "medium",
3200      "supports_attachments": true
3201    },
3202    {
3203      "id": "qwen/qwen3.5-27b",
3204      "name": "Qwen: Qwen3.5-27B",
3205      "cost_per_1m_in": 0.3,
3206      "cost_per_1m_out": 2.4,
3207      "cost_per_1m_in_cached": 0,
3208      "cost_per_1m_out_cached": 0,
3209      "context_window": 262144,
3210      "default_max_tokens": 32768,
3211      "can_reason": true,
3212      "reasoning_levels": [
3213        "low",
3214        "medium",
3215        "high"
3216      ],
3217      "default_reasoning_effort": "medium",
3218      "supports_attachments": true
3219    },
3220    {
3221      "id": "qwen/qwen3.5-35b-a3b",
3222      "name": "Qwen: Qwen3.5-35B-A3B",
3223      "cost_per_1m_in": 0.15,
3224      "cost_per_1m_out": 1,
3225      "cost_per_1m_in_cached": 0,
3226      "cost_per_1m_out_cached": 0.05,
3227      "context_window": 262144,
3228      "default_max_tokens": 131072,
3229      "can_reason": true,
3230      "reasoning_levels": [
3231        "low",
3232        "medium",
3233        "high"
3234      ],
3235      "default_reasoning_effort": "medium",
3236      "supports_attachments": true
3237    },
3238    {
3239      "id": "qwen/qwen3.5-9b",
3240      "name": "Qwen: Qwen3.5-9B",
3241      "cost_per_1m_in": 0.1,
3242      "cost_per_1m_out": 0.15,
3243      "cost_per_1m_in_cached": 0,
3244      "cost_per_1m_out_cached": 0,
3245      "context_window": 256000,
3246      "default_max_tokens": 16384,
3247      "can_reason": true,
3248      "reasoning_levels": [
3249        "low",
3250        "medium",
3251        "high"
3252      ],
3253      "default_reasoning_effort": "medium",
3254      "supports_attachments": true
3255    },
3256    {
3257      "id": "qwen/qwen3.5-flash-02-23",
3258      "name": "Qwen: Qwen3.5-Flash",
3259      "cost_per_1m_in": 0.065,
3260      "cost_per_1m_out": 0.26,
3261      "cost_per_1m_in_cached": 0.08125,
3262      "cost_per_1m_out_cached": 0,
3263      "context_window": 1000000,
3264      "default_max_tokens": 32768,
3265      "can_reason": true,
3266      "reasoning_levels": [
3267        "low",
3268        "medium",
3269        "high"
3270      ],
3271      "default_reasoning_effort": "medium",
3272      "supports_attachments": true
3273    },
3274    {
3275      "id": "qwen/qwen3.6-27b",
3276      "name": "Qwen: Qwen3.6 27B",
3277      "cost_per_1m_in": 0.5,
3278      "cost_per_1m_out": 2,
3279      "cost_per_1m_in_cached": 0,
3280      "cost_per_1m_out_cached": 0.25,
3281      "context_window": 262144,
3282      "default_max_tokens": 32768,
3283      "can_reason": true,
3284      "reasoning_levels": [
3285        "low",
3286        "medium",
3287        "high"
3288      ],
3289      "default_reasoning_effort": "medium",
3290      "supports_attachments": true
3291    },
3292    {
3293      "id": "qwen/qwen3.6-35b-a3b",
3294      "name": "Qwen: Qwen3.6 35B A3B",
3295      "cost_per_1m_in": 0.15,
3296      "cost_per_1m_out": 1,
3297      "cost_per_1m_in_cached": 0,
3298      "cost_per_1m_out_cached": 0.05,
3299      "context_window": 262144,
3300      "default_max_tokens": 131072,
3301      "can_reason": true,
3302      "reasoning_levels": [
3303        "low",
3304        "medium",
3305        "high"
3306      ],
3307      "default_reasoning_effort": "medium",
3308      "supports_attachments": true
3309    },
3310    {
3311      "id": "qwen/qwen3.6-flash",
3312      "name": "Qwen: Qwen3.6 Flash",
3313      "cost_per_1m_in": 0.25,
3314      "cost_per_1m_out": 1.5,
3315      "cost_per_1m_in_cached": 0.3125,
3316      "cost_per_1m_out_cached": 0,
3317      "context_window": 1000000,
3318      "default_max_tokens": 32768,
3319      "can_reason": true,
3320      "reasoning_levels": [
3321        "low",
3322        "medium",
3323        "high"
3324      ],
3325      "default_reasoning_effort": "medium",
3326      "supports_attachments": true
3327    },
3328    {
3329      "id": "qwen/qwen3.6-max-preview",
3330      "name": "Qwen: Qwen3.6 Max Preview",
3331      "cost_per_1m_in": 1.04,
3332      "cost_per_1m_out": 6.24,
3333      "cost_per_1m_in_cached": 1.3,
3334      "cost_per_1m_out_cached": 0,
3335      "context_window": 262144,
3336      "default_max_tokens": 32768,
3337      "can_reason": true,
3338      "reasoning_levels": [
3339        "low",
3340        "medium",
3341        "high"
3342      ],
3343      "default_reasoning_effort": "medium",
3344      "supports_attachments": false
3345    },
3346    {
3347      "id": "qwen/qwen3.6-plus",
3348      "name": "Qwen: Qwen3.6 Plus",
3349      "cost_per_1m_in": 0.325,
3350      "cost_per_1m_out": 1.95,
3351      "cost_per_1m_in_cached": 0.40625,
3352      "cost_per_1m_out_cached": 0,
3353      "context_window": 1000000,
3354      "default_max_tokens": 32768,
3355      "can_reason": true,
3356      "reasoning_levels": [
3357        "low",
3358        "medium",
3359        "high"
3360      ],
3361      "default_reasoning_effort": "medium",
3362      "supports_attachments": true
3363    },
3364    {
3365      "id": "relace/relace-search",
3366      "name": "Relace: Relace Search",
3367      "cost_per_1m_in": 1,
3368      "cost_per_1m_out": 3,
3369      "cost_per_1m_in_cached": 0,
3370      "cost_per_1m_out_cached": 0,
3371      "context_window": 256000,
3372      "default_max_tokens": 64000,
3373      "can_reason": false,
3374      "supports_attachments": false
3375    },
3376    {
3377      "id": "stepfun/step-3.5-flash",
3378      "name": "StepFun: Step 3.5 Flash",
3379      "cost_per_1m_in": 0.1,
3380      "cost_per_1m_out": 0.3,
3381      "cost_per_1m_in_cached": 0,
3382      "cost_per_1m_out_cached": 0,
3383      "context_window": 262144,
3384      "default_max_tokens": 32768,
3385      "can_reason": true,
3386      "reasoning_levels": [
3387        "low",
3388        "medium",
3389        "high"
3390      ],
3391      "default_reasoning_effort": "medium",
3392      "supports_attachments": false
3393    },
3394    {
3395      "id": "tngtech/deepseek-r1t2-chimera",
3396      "name": "TNG: DeepSeek R1T2 Chimera",
3397      "cost_per_1m_in": 0.3,
3398      "cost_per_1m_out": 1.1,
3399      "cost_per_1m_in_cached": 0,
3400      "cost_per_1m_out_cached": 0.15,
3401      "context_window": 163840,
3402      "default_max_tokens": 81920,
3403      "can_reason": true,
3404      "reasoning_levels": [
3405        "low",
3406        "medium",
3407        "high"
3408      ],
3409      "default_reasoning_effort": "medium",
3410      "supports_attachments": false
3411    },
3412    {
3413      "id": "tencent/hy3-preview:free",
3414      "name": "Tencent: Hy3 preview (free)",
3415      "cost_per_1m_in": 0,
3416      "cost_per_1m_out": 0,
3417      "cost_per_1m_in_cached": 0,
3418      "cost_per_1m_out_cached": 0,
3419      "context_window": 262144,
3420      "default_max_tokens": 131072,
3421      "can_reason": true,
3422      "reasoning_levels": [
3423        "low",
3424        "medium",
3425        "high"
3426      ],
3427      "default_reasoning_effort": "medium",
3428      "supports_attachments": false
3429    },
3430    {
3431      "id": "thedrummer/rocinante-12b",
3432      "name": "TheDrummer: Rocinante 12B",
3433      "cost_per_1m_in": 0.17,
3434      "cost_per_1m_out": 0.43,
3435      "cost_per_1m_in_cached": 0,
3436      "cost_per_1m_out_cached": 0,
3437      "context_window": 32768,
3438      "default_max_tokens": 16384,
3439      "can_reason": false,
3440      "supports_attachments": false
3441    },
3442    {
3443      "id": "thedrummer/unslopnemo-12b",
3444      "name": "TheDrummer: UnslopNemo 12B",
3445      "cost_per_1m_in": 0.4,
3446      "cost_per_1m_out": 0.4,
3447      "cost_per_1m_in_cached": 0,
3448      "cost_per_1m_out_cached": 0,
3449      "context_window": 32768,
3450      "default_max_tokens": 16384,
3451      "can_reason": false,
3452      "supports_attachments": false
3453    },
3454    {
3455      "id": "alibaba/tongyi-deepresearch-30b-a3b",
3456      "name": "Tongyi DeepResearch 30B A3B",
3457      "cost_per_1m_in": 0.09,
3458      "cost_per_1m_out": 0.45,
3459      "cost_per_1m_in_cached": 0,
3460      "cost_per_1m_out_cached": 0.09,
3461      "context_window": 131072,
3462      "default_max_tokens": 65536,
3463      "can_reason": true,
3464      "reasoning_levels": [
3465        "low",
3466        "medium",
3467        "high"
3468      ],
3469      "default_reasoning_effort": "medium",
3470      "supports_attachments": false
3471    },
3472    {
3473      "id": "upstage/solar-pro-3",
3474      "name": "Upstage: Solar Pro 3",
3475      "cost_per_1m_in": 0.15,
3476      "cost_per_1m_out": 0.6,
3477      "cost_per_1m_in_cached": 0,
3478      "cost_per_1m_out_cached": 0.015,
3479      "context_window": 128000,
3480      "default_max_tokens": 12800,
3481      "can_reason": true,
3482      "reasoning_levels": [
3483        "low",
3484        "medium",
3485        "high"
3486      ],
3487      "default_reasoning_effort": "medium",
3488      "supports_attachments": false
3489    },
3490    {
3491      "id": "xiaomi/mimo-v2-flash",
3492      "name": "Xiaomi: MiMo-V2-Flash",
3493      "cost_per_1m_in": 0.1,
3494      "cost_per_1m_out": 0.3,
3495      "cost_per_1m_in_cached": 0,
3496      "cost_per_1m_out_cached": 0.02,
3497      "context_window": 262144,
3498      "default_max_tokens": 16000,
3499      "can_reason": true,
3500      "reasoning_levels": [
3501        "low",
3502        "medium",
3503        "high"
3504      ],
3505      "default_reasoning_effort": "medium",
3506      "supports_attachments": false
3507    },
3508    {
3509      "id": "xiaomi/mimo-v2-omni",
3510      "name": "Xiaomi: MiMo-V2-Omni",
3511      "cost_per_1m_in": 0.4,
3512      "cost_per_1m_out": 2,
3513      "cost_per_1m_in_cached": 0,
3514      "cost_per_1m_out_cached": 0.08,
3515      "context_window": 262144,
3516      "default_max_tokens": 32768,
3517      "can_reason": true,
3518      "reasoning_levels": [
3519        "low",
3520        "medium",
3521        "high"
3522      ],
3523      "default_reasoning_effort": "medium",
3524      "supports_attachments": true
3525    },
3526    {
3527      "id": "xiaomi/mimo-v2-pro",
3528      "name": "Xiaomi: MiMo-V2-Pro",
3529      "cost_per_1m_in": 1,
3530      "cost_per_1m_out": 3,
3531      "cost_per_1m_in_cached": 0,
3532      "cost_per_1m_out_cached": 0.2,
3533      "context_window": 1048576,
3534      "default_max_tokens": 65536,
3535      "can_reason": true,
3536      "reasoning_levels": [
3537        "low",
3538        "medium",
3539        "high"
3540      ],
3541      "default_reasoning_effort": "medium",
3542      "supports_attachments": false
3543    },
3544    {
3545      "id": "xiaomi/mimo-v2.5",
3546      "name": "Xiaomi: MiMo-V2.5",
3547      "cost_per_1m_in": 0.4,
3548      "cost_per_1m_out": 2,
3549      "cost_per_1m_in_cached": 0,
3550      "cost_per_1m_out_cached": 0.08,
3551      "context_window": 1048576,
3552      "default_max_tokens": 65536,
3553      "can_reason": true,
3554      "reasoning_levels": [
3555        "low",
3556        "medium",
3557        "high"
3558      ],
3559      "default_reasoning_effort": "medium",
3560      "supports_attachments": true
3561    },
3562    {
3563      "id": "xiaomi/mimo-v2.5-pro",
3564      "name": "Xiaomi: MiMo-V2.5-Pro",
3565      "cost_per_1m_in": 1,
3566      "cost_per_1m_out": 3,
3567      "cost_per_1m_in_cached": 0,
3568      "cost_per_1m_out_cached": 0.2,
3569      "context_window": 1048576,
3570      "default_max_tokens": 65536,
3571      "can_reason": true,
3572      "reasoning_levels": [
3573        "low",
3574        "medium",
3575        "high"
3576      ],
3577      "default_reasoning_effort": "medium",
3578      "supports_attachments": false
3579    },
3580    {
3581      "id": "z-ai/glm-4-32b",
3582      "name": "Z.ai: GLM 4 32B",
3583      "cost_per_1m_in": 0.1,
3584      "cost_per_1m_out": 0.1,
3585      "cost_per_1m_in_cached": 0,
3586      "cost_per_1m_out_cached": 0,
3587      "context_window": 128000,
3588      "default_max_tokens": 12800,
3589      "can_reason": false,
3590      "supports_attachments": false
3591    },
3592    {
3593      "id": "z-ai/glm-4.5",
3594      "name": "Z.ai: GLM 4.5",
3595      "cost_per_1m_in": 0.6,
3596      "cost_per_1m_out": 2.2,
3597      "cost_per_1m_in_cached": 0,
3598      "cost_per_1m_out_cached": 0.11,
3599      "context_window": 131072,
3600      "default_max_tokens": 48000,
3601      "can_reason": true,
3602      "reasoning_levels": [
3603        "low",
3604        "medium",
3605        "high"
3606      ],
3607      "default_reasoning_effort": "medium",
3608      "supports_attachments": false
3609    },
3610    {
3611      "id": "z-ai/glm-4.5-air",
3612      "name": "Z.ai: GLM 4.5 Air",
3613      "cost_per_1m_in": 0.13,
3614      "cost_per_1m_out": 0.85,
3615      "cost_per_1m_in_cached": 0,
3616      "cost_per_1m_out_cached": 0.025,
3617      "context_window": 131072,
3618      "default_max_tokens": 49152,
3619      "can_reason": true,
3620      "reasoning_levels": [
3621        "low",
3622        "medium",
3623        "high"
3624      ],
3625      "default_reasoning_effort": "medium",
3626      "supports_attachments": false
3627    },
3628    {
3629      "id": "z-ai/glm-4.5-air:free",
3630      "name": "Z.ai: GLM 4.5 Air (free)",
3631      "cost_per_1m_in": 0,
3632      "cost_per_1m_out": 0,
3633      "cost_per_1m_in_cached": 0,
3634      "cost_per_1m_out_cached": 0,
3635      "context_window": 131072,
3636      "default_max_tokens": 48000,
3637      "can_reason": true,
3638      "reasoning_levels": [
3639        "low",
3640        "medium",
3641        "high"
3642      ],
3643      "default_reasoning_effort": "medium",
3644      "supports_attachments": false
3645    },
3646    {
3647      "id": "z-ai/glm-4.5v",
3648      "name": "Z.ai: GLM 4.5V",
3649      "cost_per_1m_in": 0.6,
3650      "cost_per_1m_out": 1.8,
3651      "cost_per_1m_in_cached": 0,
3652      "cost_per_1m_out_cached": 0.11,
3653      "context_window": 65536,
3654      "default_max_tokens": 8192,
3655      "can_reason": true,
3656      "reasoning_levels": [
3657        "low",
3658        "medium",
3659        "high"
3660      ],
3661      "default_reasoning_effort": "medium",
3662      "supports_attachments": true
3663    },
3664    {
3665      "id": "z-ai/glm-4.6",
3666      "name": "Z.ai: GLM 4.6",
3667      "cost_per_1m_in": 0.39,
3668      "cost_per_1m_out": 1.9,
3669      "cost_per_1m_in_cached": 0,
3670      "cost_per_1m_out_cached": 0,
3671      "context_window": 204800,
3672      "default_max_tokens": 102400,
3673      "can_reason": true,
3674      "reasoning_levels": [
3675        "low",
3676        "medium",
3677        "high"
3678      ],
3679      "default_reasoning_effort": "medium",
3680      "supports_attachments": false
3681    },
3682    {
3683      "id": "z-ai/glm-4.6v",
3684      "name": "Z.ai: GLM 4.6V",
3685      "cost_per_1m_in": 0.3,
3686      "cost_per_1m_out": 0.9,
3687      "cost_per_1m_in_cached": 0,
3688      "cost_per_1m_out_cached": 0.05,
3689      "context_window": 131072,
3690      "default_max_tokens": 12000,
3691      "can_reason": true,
3692      "reasoning_levels": [
3693        "low",
3694        "medium",
3695        "high"
3696      ],
3697      "default_reasoning_effort": "medium",
3698      "supports_attachments": true
3699    },
3700    {
3701      "id": "z-ai/glm-4.7",
3702      "name": "Z.ai: GLM 4.7",
3703      "cost_per_1m_in": 0.38,
3704      "cost_per_1m_out": 1.74,
3705      "cost_per_1m_in_cached": 0,
3706      "cost_per_1m_out_cached": 0,
3707      "context_window": 202752,
3708      "default_max_tokens": 20275,
3709      "can_reason": true,
3710      "reasoning_levels": [
3711        "low",
3712        "medium",
3713        "high"
3714      ],
3715      "default_reasoning_effort": "medium",
3716      "supports_attachments": false
3717    },
3718    {
3719      "id": "z-ai/glm-4.7-flash",
3720      "name": "Z.ai: GLM 4.7 Flash",
3721      "cost_per_1m_in": 0.06,
3722      "cost_per_1m_out": 0.4,
3723      "cost_per_1m_in_cached": 0,
3724      "cost_per_1m_out_cached": 0.01,
3725      "context_window": 202752,
3726      "default_max_tokens": 8192,
3727      "can_reason": true,
3728      "reasoning_levels": [
3729        "low",
3730        "medium",
3731        "high"
3732      ],
3733      "default_reasoning_effort": "medium",
3734      "supports_attachments": false
3735    },
3736    {
3737      "id": "z-ai/glm-5",
3738      "name": "Z.ai: GLM 5",
3739      "cost_per_1m_in": 0.95,
3740      "cost_per_1m_out": 2.55,
3741      "cost_per_1m_in_cached": 0,
3742      "cost_per_1m_out_cached": 0.2,
3743      "context_window": 204800,
3744      "default_max_tokens": 65536,
3745      "can_reason": true,
3746      "reasoning_levels": [
3747        "low",
3748        "medium",
3749        "high"
3750      ],
3751      "default_reasoning_effort": "medium",
3752      "supports_attachments": false
3753    },
3754    {
3755      "id": "z-ai/glm-5-turbo",
3756      "name": "Z.ai: GLM 5 Turbo",
3757      "cost_per_1m_in": 1.2,
3758      "cost_per_1m_out": 4,
3759      "cost_per_1m_in_cached": 0,
3760      "cost_per_1m_out_cached": 0.24,
3761      "context_window": 262144,
3762      "default_max_tokens": 65536,
3763      "can_reason": true,
3764      "reasoning_levels": [
3765        "low",
3766        "medium",
3767        "high"
3768      ],
3769      "default_reasoning_effort": "medium",
3770      "supports_attachments": false
3771    },
3772    {
3773      "id": "z-ai/glm-5.1",
3774      "name": "Z.ai: GLM 5.1",
3775      "cost_per_1m_in": 1.4,
3776      "cost_per_1m_out": 4.4,
3777      "cost_per_1m_in_cached": 0,
3778      "cost_per_1m_out_cached": 0.26,
3779      "context_window": 204800,
3780      "default_max_tokens": 65536,
3781      "can_reason": true,
3782      "reasoning_levels": [
3783        "low",
3784        "medium",
3785        "high"
3786      ],
3787      "default_reasoning_effort": "medium",
3788      "supports_attachments": false
3789    },
3790    {
3791      "id": "z-ai/glm-5v-turbo",
3792      "name": "Z.ai: GLM 5V Turbo",
3793      "cost_per_1m_in": 1.2,
3794      "cost_per_1m_out": 4,
3795      "cost_per_1m_in_cached": 0,
3796      "cost_per_1m_out_cached": 0.24,
3797      "context_window": 202752,
3798      "default_max_tokens": 65536,
3799      "can_reason": true,
3800      "reasoning_levels": [
3801        "low",
3802        "medium",
3803        "high"
3804      ],
3805      "default_reasoning_effort": "medium",
3806      "supports_attachments": true
3807    },
3808    {
3809      "id": "inclusionai/ling-2.6-1t:free",
3810      "name": "inclusionAI: Ling-2.6-1T (free)",
3811      "cost_per_1m_in": 0,
3812      "cost_per_1m_out": 0,
3813      "cost_per_1m_in_cached": 0,
3814      "cost_per_1m_out_cached": 0,
3815      "context_window": 262144,
3816      "default_max_tokens": 16384,
3817      "can_reason": false,
3818      "supports_attachments": false
3819    },
3820    {
3821      "id": "inclusionai/ling-2.6-flash",
3822      "name": "inclusionAI: Ling-2.6-flash",
3823      "cost_per_1m_in": 0.08,
3824      "cost_per_1m_out": 0.24,
3825      "cost_per_1m_in_cached": 0,
3826      "cost_per_1m_out_cached": 0.016,
3827      "context_window": 262144,
3828      "default_max_tokens": 16384,
3829      "can_reason": false,
3830      "supports_attachments": false
3831    },
3832    {
3833      "id": "x-ai/grok-3",
3834      "name": "xAI: Grok 3",
3835      "cost_per_1m_in": 3,
3836      "cost_per_1m_out": 15,
3837      "cost_per_1m_in_cached": 0,
3838      "cost_per_1m_out_cached": 0.75,
3839      "context_window": 131072,
3840      "default_max_tokens": 13107,
3841      "can_reason": false,
3842      "supports_attachments": false
3843    },
3844    {
3845      "id": "x-ai/grok-3-beta",
3846      "name": "xAI: Grok 3 Beta",
3847      "cost_per_1m_in": 3,
3848      "cost_per_1m_out": 15,
3849      "cost_per_1m_in_cached": 0,
3850      "cost_per_1m_out_cached": 0.75,
3851      "context_window": 131072,
3852      "default_max_tokens": 13107,
3853      "can_reason": false,
3854      "supports_attachments": false
3855    },
3856    {
3857      "id": "x-ai/grok-3-mini",
3858      "name": "xAI: Grok 3 Mini",
3859      "cost_per_1m_in": 0.3,
3860      "cost_per_1m_out": 0.5,
3861      "cost_per_1m_in_cached": 0,
3862      "cost_per_1m_out_cached": 0.075,
3863      "context_window": 131072,
3864      "default_max_tokens": 13107,
3865      "can_reason": true,
3866      "reasoning_levels": [
3867        "low",
3868        "medium",
3869        "high"
3870      ],
3871      "default_reasoning_effort": "medium",
3872      "supports_attachments": false
3873    },
3874    {
3875      "id": "x-ai/grok-3-mini-beta",
3876      "name": "xAI: Grok 3 Mini Beta",
3877      "cost_per_1m_in": 0.3,
3878      "cost_per_1m_out": 0.5,
3879      "cost_per_1m_in_cached": 0,
3880      "cost_per_1m_out_cached": 0.075,
3881      "context_window": 131072,
3882      "default_max_tokens": 13107,
3883      "can_reason": true,
3884      "reasoning_levels": [
3885        "low",
3886        "medium",
3887        "high"
3888      ],
3889      "default_reasoning_effort": "medium",
3890      "supports_attachments": false
3891    },
3892    {
3893      "id": "x-ai/grok-4",
3894      "name": "xAI: Grok 4",
3895      "cost_per_1m_in": 3,
3896      "cost_per_1m_out": 15,
3897      "cost_per_1m_in_cached": 0,
3898      "cost_per_1m_out_cached": 0.75,
3899      "context_window": 256000,
3900      "default_max_tokens": 25600,
3901      "can_reason": true,
3902      "reasoning_levels": [
3903        "low",
3904        "medium",
3905        "high"
3906      ],
3907      "default_reasoning_effort": "medium",
3908      "supports_attachments": true
3909    },
3910    {
3911      "id": "x-ai/grok-4-fast",
3912      "name": "xAI: Grok 4 Fast",
3913      "cost_per_1m_in": 0.2,
3914      "cost_per_1m_out": 0.5,
3915      "cost_per_1m_in_cached": 0,
3916      "cost_per_1m_out_cached": 0.05,
3917      "context_window": 2000000,
3918      "default_max_tokens": 15000,
3919      "can_reason": true,
3920      "reasoning_levels": [
3921        "low",
3922        "medium",
3923        "high"
3924      ],
3925      "default_reasoning_effort": "medium",
3926      "supports_attachments": true
3927    },
3928    {
3929      "id": "x-ai/grok-4.1-fast",
3930      "name": "xAI: Grok 4.1 Fast",
3931      "cost_per_1m_in": 0.2,
3932      "cost_per_1m_out": 0.5,
3933      "cost_per_1m_in_cached": 0,
3934      "cost_per_1m_out_cached": 0.05,
3935      "context_window": 2000000,
3936      "default_max_tokens": 15000,
3937      "can_reason": true,
3938      "reasoning_levels": [
3939        "low",
3940        "medium",
3941        "high"
3942      ],
3943      "default_reasoning_effort": "medium",
3944      "supports_attachments": true
3945    },
3946    {
3947      "id": "x-ai/grok-4.20",
3948      "name": "xAI: Grok 4.20",
3949      "cost_per_1m_in": 1.25,
3950      "cost_per_1m_out": 2.5,
3951      "cost_per_1m_in_cached": 0,
3952      "cost_per_1m_out_cached": 0.2,
3953      "context_window": 2000000,
3954      "default_max_tokens": 200000,
3955      "can_reason": true,
3956      "reasoning_levels": [
3957        "low",
3958        "medium",
3959        "high"
3960      ],
3961      "default_reasoning_effort": "medium",
3962      "supports_attachments": true
3963    },
3964    {
3965      "id": "x-ai/grok-4.3",
3966      "name": "xAI: Grok 4.3",
3967      "cost_per_1m_in": 1.25,
3968      "cost_per_1m_out": 2.5,
3969      "cost_per_1m_in_cached": 0,
3970      "cost_per_1m_out_cached": 0.2,
3971      "context_window": 1000000,
3972      "default_max_tokens": 100000,
3973      "can_reason": true,
3974      "reasoning_levels": [
3975        "low",
3976        "medium",
3977        "high"
3978      ],
3979      "default_reasoning_effort": "medium",
3980      "supports_attachments": true
3981    },
3982    {
3983      "id": "x-ai/grok-code-fast-1",
3984      "name": "xAI: Grok Code Fast 1",
3985      "cost_per_1m_in": 0.2,
3986      "cost_per_1m_out": 1.5,
3987      "cost_per_1m_in_cached": 0,
3988      "cost_per_1m_out_cached": 0.02,
3989      "context_window": 256000,
3990      "default_max_tokens": 5000,
3991      "can_reason": true,
3992      "reasoning_levels": [
3993        "low",
3994        "medium",
3995        "high"
3996      ],
3997      "default_reasoning_effort": "medium",
3998      "supports_attachments": false
3999    }
4000  ],
4001  "default_headers": {
4002    "HTTP-Referer": "https://charm.land",
4003    "X-Title": "Crush"
4004  }
4005}