openrouter.json

   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false
  21    },
  22    {
  23      "id": "allenai/olmo-3.1-32b-instruct",
  24      "name": "AllenAI: Olmo 3.1 32B Instruct",
  25      "cost_per_1m_in": 0.2,
  26      "cost_per_1m_out": 0.6,
  27      "cost_per_1m_in_cached": 0,
  28      "cost_per_1m_out_cached": 0,
  29      "context_window": 65536,
  30      "default_max_tokens": 6553,
  31      "can_reason": false,
  32      "supports_attachments": false
  33    },
  34    {
  35      "id": "amazon/nova-2-lite-v1",
  36      "name": "Amazon: Nova 2 Lite",
  37      "cost_per_1m_in": 0.3,
  38      "cost_per_1m_out": 2.5,
  39      "cost_per_1m_in_cached": 0,
  40      "cost_per_1m_out_cached": 0,
  41      "context_window": 1000000,
  42      "default_max_tokens": 32767,
  43      "can_reason": true,
  44      "reasoning_levels": [
  45        "low",
  46        "medium",
  47        "high"
  48      ],
  49      "default_reasoning_effort": "medium",
  50      "supports_attachments": true
  51    },
  52    {
  53      "id": "amazon/nova-lite-v1",
  54      "name": "Amazon: Nova Lite 1.0",
  55      "cost_per_1m_in": 0.06,
  56      "cost_per_1m_out": 0.24,
  57      "cost_per_1m_in_cached": 0,
  58      "cost_per_1m_out_cached": 0,
  59      "context_window": 300000,
  60      "default_max_tokens": 2560,
  61      "can_reason": false,
  62      "supports_attachments": true
  63    },
  64    {
  65      "id": "amazon/nova-micro-v1",
  66      "name": "Amazon: Nova Micro 1.0",
  67      "cost_per_1m_in": 0.035,
  68      "cost_per_1m_out": 0.14,
  69      "cost_per_1m_in_cached": 0,
  70      "cost_per_1m_out_cached": 0,
  71      "context_window": 128000,
  72      "default_max_tokens": 2560,
  73      "can_reason": false,
  74      "supports_attachments": false
  75    },
  76    {
  77      "id": "amazon/nova-premier-v1",
  78      "name": "Amazon: Nova Premier 1.0",
  79      "cost_per_1m_in": 2.5,
  80      "cost_per_1m_out": 12.5,
  81      "cost_per_1m_in_cached": 0,
  82      "cost_per_1m_out_cached": 0.625,
  83      "context_window": 1000000,
  84      "default_max_tokens": 16000,
  85      "can_reason": false,
  86      "supports_attachments": true
  87    },
  88    {
  89      "id": "amazon/nova-pro-v1",
  90      "name": "Amazon: Nova Pro 1.0",
  91      "cost_per_1m_in": 0.8,
  92      "cost_per_1m_out": 3.2,
  93      "cost_per_1m_in_cached": 0,
  94      "cost_per_1m_out_cached": 0,
  95      "context_window": 300000,
  96      "default_max_tokens": 2560,
  97      "can_reason": false,
  98      "supports_attachments": true
  99    },
 100    {
 101      "id": "anthropic/claude-3-haiku",
 102      "name": "Anthropic: Claude 3 Haiku",
 103      "cost_per_1m_in": 0.25,
 104      "cost_per_1m_out": 1.25,
 105      "cost_per_1m_in_cached": 0.3,
 106      "cost_per_1m_out_cached": 0.03,
 107      "context_window": 200000,
 108      "default_max_tokens": 2048,
 109      "can_reason": false,
 110      "supports_attachments": true
 111    },
 112    {
 113      "id": "anthropic/claude-3.5-haiku",
 114      "name": "Anthropic: Claude 3.5 Haiku",
 115      "cost_per_1m_in": 0.8,
 116      "cost_per_1m_out": 4,
 117      "cost_per_1m_in_cached": 1,
 118      "cost_per_1m_out_cached": 0.08,
 119      "context_window": 200000,
 120      "default_max_tokens": 4096,
 121      "can_reason": false,
 122      "supports_attachments": true
 123    },
 124    {
 125      "id": "anthropic/claude-3.7-sonnet",
 126      "name": "Anthropic: Claude 3.7 Sonnet",
 127      "cost_per_1m_in": 3,
 128      "cost_per_1m_out": 15,
 129      "cost_per_1m_in_cached": 3.75,
 130      "cost_per_1m_out_cached": 0.3,
 131      "context_window": 200000,
 132      "default_max_tokens": 64000,
 133      "can_reason": true,
 134      "reasoning_levels": [
 135        "low",
 136        "medium",
 137        "high"
 138      ],
 139      "default_reasoning_effort": "medium",
 140      "supports_attachments": true
 141    },
 142    {
 143      "id": "anthropic/claude-3.7-sonnet:thinking",
 144      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 145      "cost_per_1m_in": 3,
 146      "cost_per_1m_out": 15,
 147      "cost_per_1m_in_cached": 3.75,
 148      "cost_per_1m_out_cached": 0.3,
 149      "context_window": 200000,
 150      "default_max_tokens": 32000,
 151      "can_reason": true,
 152      "reasoning_levels": [
 153        "low",
 154        "medium",
 155        "high"
 156      ],
 157      "default_reasoning_effort": "medium",
 158      "supports_attachments": true
 159    },
 160    {
 161      "id": "anthropic/claude-haiku-4.5",
 162      "name": "Anthropic: Claude Haiku 4.5",
 163      "cost_per_1m_in": 1,
 164      "cost_per_1m_out": 5,
 165      "cost_per_1m_in_cached": 1.25,
 166      "cost_per_1m_out_cached": 0.1,
 167      "context_window": 200000,
 168      "default_max_tokens": 32000,
 169      "can_reason": true,
 170      "reasoning_levels": [
 171        "low",
 172        "medium",
 173        "high"
 174      ],
 175      "default_reasoning_effort": "medium",
 176      "supports_attachments": true
 177    },
 178    {
 179      "id": "anthropic/claude-opus-4",
 180      "name": "Anthropic: Claude Opus 4",
 181      "cost_per_1m_in": 15,
 182      "cost_per_1m_out": 75,
 183      "cost_per_1m_in_cached": 18.75,
 184      "cost_per_1m_out_cached": 1.5,
 185      "context_window": 200000,
 186      "default_max_tokens": 16000,
 187      "can_reason": true,
 188      "reasoning_levels": [
 189        "low",
 190        "medium",
 191        "high"
 192      ],
 193      "default_reasoning_effort": "medium",
 194      "supports_attachments": true
 195    },
 196    {
 197      "id": "anthropic/claude-opus-4.1",
 198      "name": "Anthropic: Claude Opus 4.1",
 199      "cost_per_1m_in": 15,
 200      "cost_per_1m_out": 75,
 201      "cost_per_1m_in_cached": 18.75,
 202      "cost_per_1m_out_cached": 1.5,
 203      "context_window": 200000,
 204      "default_max_tokens": 16000,
 205      "can_reason": true,
 206      "reasoning_levels": [
 207        "low",
 208        "medium",
 209        "high"
 210      ],
 211      "default_reasoning_effort": "medium",
 212      "supports_attachments": true
 213    },
 214    {
 215      "id": "anthropic/claude-opus-4.5",
 216      "name": "Anthropic: Claude Opus 4.5",
 217      "cost_per_1m_in": 5,
 218      "cost_per_1m_out": 25,
 219      "cost_per_1m_in_cached": 6.25,
 220      "cost_per_1m_out_cached": 0.5,
 221      "context_window": 200000,
 222      "default_max_tokens": 32000,
 223      "can_reason": true,
 224      "reasoning_levels": [
 225        "low",
 226        "medium",
 227        "high"
 228      ],
 229      "default_reasoning_effort": "medium",
 230      "supports_attachments": true
 231    },
 232    {
 233      "id": "anthropic/claude-opus-4.6",
 234      "name": "Anthropic: Claude Opus 4.6",
 235      "cost_per_1m_in": 5,
 236      "cost_per_1m_out": 25,
 237      "cost_per_1m_in_cached": 6.25,
 238      "cost_per_1m_out_cached": 0.5,
 239      "context_window": 1000000,
 240      "default_max_tokens": 64000,
 241      "can_reason": true,
 242      "reasoning_levels": [
 243        "low",
 244        "medium",
 245        "high"
 246      ],
 247      "default_reasoning_effort": "medium",
 248      "supports_attachments": true
 249    },
 250    {
 251      "id": "anthropic/claude-opus-4.6-fast",
 252      "name": "Anthropic: Claude Opus 4.6 (Fast)",
 253      "cost_per_1m_in": 30,
 254      "cost_per_1m_out": 150,
 255      "cost_per_1m_in_cached": 37.5,
 256      "cost_per_1m_out_cached": 3,
 257      "context_window": 1000000,
 258      "default_max_tokens": 64000,
 259      "can_reason": true,
 260      "reasoning_levels": [
 261        "low",
 262        "medium",
 263        "high"
 264      ],
 265      "default_reasoning_effort": "medium",
 266      "supports_attachments": true
 267    },
 268    {
 269      "id": "anthropic/claude-opus-4.7",
 270      "name": "Anthropic: Claude Opus 4.7",
 271      "cost_per_1m_in": 5,
 272      "cost_per_1m_out": 25,
 273      "cost_per_1m_in_cached": 6.25,
 274      "cost_per_1m_out_cached": 0.5,
 275      "context_window": 1000000,
 276      "default_max_tokens": 64000,
 277      "can_reason": true,
 278      "reasoning_levels": [
 279        "low",
 280        "medium",
 281        "high"
 282      ],
 283      "default_reasoning_effort": "medium",
 284      "supports_attachments": true
 285    },
 286    {
 287      "id": "anthropic/claude-sonnet-4",
 288      "name": "Anthropic: Claude Sonnet 4",
 289      "cost_per_1m_in": 3,
 290      "cost_per_1m_out": 15,
 291      "cost_per_1m_in_cached": 3.75,
 292      "cost_per_1m_out_cached": 0.3,
 293      "context_window": 1000000,
 294      "default_max_tokens": 32000,
 295      "can_reason": true,
 296      "reasoning_levels": [
 297        "low",
 298        "medium",
 299        "high"
 300      ],
 301      "default_reasoning_effort": "medium",
 302      "supports_attachments": true
 303    },
 304    {
 305      "id": "anthropic/claude-sonnet-4.5",
 306      "name": "Anthropic: Claude Sonnet 4.5",
 307      "cost_per_1m_in": 3,
 308      "cost_per_1m_out": 15,
 309      "cost_per_1m_in_cached": 3.75,
 310      "cost_per_1m_out_cached": 0.3,
 311      "context_window": 1000000,
 312      "default_max_tokens": 32000,
 313      "can_reason": true,
 314      "reasoning_levels": [
 315        "low",
 316        "medium",
 317        "high"
 318      ],
 319      "default_reasoning_effort": "medium",
 320      "supports_attachments": true
 321    },
 322    {
 323      "id": "anthropic/claude-sonnet-4.6",
 324      "name": "Anthropic: Claude Sonnet 4.6",
 325      "cost_per_1m_in": 3,
 326      "cost_per_1m_out": 15,
 327      "cost_per_1m_in_cached": 3.75,
 328      "cost_per_1m_out_cached": 0.3,
 329      "context_window": 1000000,
 330      "default_max_tokens": 64000,
 331      "can_reason": true,
 332      "reasoning_levels": [
 333        "low",
 334        "medium",
 335        "high"
 336      ],
 337      "default_reasoning_effort": "medium",
 338      "supports_attachments": true
 339    },
 340    {
 341      "id": "arcee-ai/trinity-large-preview",
 342      "name": "Arcee AI: Trinity Large Preview",
 343      "cost_per_1m_in": 0.15,
 344      "cost_per_1m_out": 0.45,
 345      "cost_per_1m_in_cached": 0,
 346      "cost_per_1m_out_cached": 0,
 347      "context_window": 131000,
 348      "default_max_tokens": 13100,
 349      "can_reason": false,
 350      "supports_attachments": false
 351    },
 352    {
 353      "id": "arcee-ai/trinity-large-thinking",
 354      "name": "Arcee AI: Trinity Large Thinking",
 355      "cost_per_1m_in": 0.22,
 356      "cost_per_1m_out": 0.85,
 357      "cost_per_1m_in_cached": 0,
 358      "cost_per_1m_out_cached": 0.06,
 359      "context_window": 262144,
 360      "default_max_tokens": 131072,
 361      "can_reason": true,
 362      "reasoning_levels": [
 363        "low",
 364        "medium",
 365        "high"
 366      ],
 367      "default_reasoning_effort": "medium",
 368      "supports_attachments": false
 369    },
 370    {
 371      "id": "arcee-ai/trinity-mini",
 372      "name": "Arcee AI: Trinity Mini",
 373      "cost_per_1m_in": 0.045,
 374      "cost_per_1m_out": 0.15,
 375      "cost_per_1m_in_cached": 0,
 376      "cost_per_1m_out_cached": 0,
 377      "context_window": 131072,
 378      "default_max_tokens": 65536,
 379      "can_reason": true,
 380      "reasoning_levels": [
 381        "low",
 382        "medium",
 383        "high"
 384      ],
 385      "default_reasoning_effort": "medium",
 386      "supports_attachments": false
 387    },
 388    {
 389      "id": "arcee-ai/virtuoso-large",
 390      "name": "Arcee AI: Virtuoso Large",
 391      "cost_per_1m_in": 0.75,
 392      "cost_per_1m_out": 1.2,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 131072,
 396      "default_max_tokens": 32000,
 397      "can_reason": false,
 398      "supports_attachments": false
 399    },
 400    {
 401      "id": "baidu/ernie-4.5-21b-a3b",
 402      "name": "Baidu: ERNIE 4.5 21B A3B",
 403      "cost_per_1m_in": 0.07,
 404      "cost_per_1m_out": 0.28,
 405      "cost_per_1m_in_cached": 0,
 406      "cost_per_1m_out_cached": 0,
 407      "context_window": 120000,
 408      "default_max_tokens": 4000,
 409      "can_reason": false,
 410      "supports_attachments": false
 411    },
 412    {
 413      "id": "baidu/ernie-4.5-vl-28b-a3b",
 414      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 415      "cost_per_1m_in": 0.14,
 416      "cost_per_1m_out": 0.56,
 417      "cost_per_1m_in_cached": 0,
 418      "cost_per_1m_out_cached": 0,
 419      "context_window": 30000,
 420      "default_max_tokens": 4000,
 421      "can_reason": true,
 422      "reasoning_levels": [
 423        "low",
 424        "medium",
 425        "high"
 426      ],
 427      "default_reasoning_effort": "medium",
 428      "supports_attachments": true
 429    },
 430    {
 431      "id": "bytedance-seed/seed-1.6",
 432      "name": "ByteDance Seed: Seed 1.6",
 433      "cost_per_1m_in": 0.25,
 434      "cost_per_1m_out": 2,
 435      "cost_per_1m_in_cached": 0,
 436      "cost_per_1m_out_cached": 0,
 437      "context_window": 262144,
 438      "default_max_tokens": 16384,
 439      "can_reason": true,
 440      "reasoning_levels": [
 441        "low",
 442        "medium",
 443        "high"
 444      ],
 445      "default_reasoning_effort": "medium",
 446      "supports_attachments": true
 447    },
 448    {
 449      "id": "bytedance-seed/seed-1.6-flash",
 450      "name": "ByteDance Seed: Seed 1.6 Flash",
 451      "cost_per_1m_in": 0.075,
 452      "cost_per_1m_out": 0.3,
 453      "cost_per_1m_in_cached": 0,
 454      "cost_per_1m_out_cached": 0,
 455      "context_window": 262144,
 456      "default_max_tokens": 16384,
 457      "can_reason": true,
 458      "reasoning_levels": [
 459        "low",
 460        "medium",
 461        "high"
 462      ],
 463      "default_reasoning_effort": "medium",
 464      "supports_attachments": true
 465    },
 466    {
 467      "id": "bytedance-seed/seed-2.0-lite",
 468      "name": "ByteDance Seed: Seed-2.0-Lite",
 469      "cost_per_1m_in": 0.25,
 470      "cost_per_1m_out": 2,
 471      "cost_per_1m_in_cached": 0,
 472      "cost_per_1m_out_cached": 0,
 473      "context_window": 262144,
 474      "default_max_tokens": 65536,
 475      "can_reason": true,
 476      "reasoning_levels": [
 477        "low",
 478        "medium",
 479        "high"
 480      ],
 481      "default_reasoning_effort": "medium",
 482      "supports_attachments": true
 483    },
 484    {
 485      "id": "bytedance-seed/seed-2.0-mini",
 486      "name": "ByteDance Seed: Seed-2.0-Mini",
 487      "cost_per_1m_in": 0.1,
 488      "cost_per_1m_out": 0.4,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 262144,
 492      "default_max_tokens": 65536,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": true
 501    },
 502    {
 503      "id": "cohere/command-r-08-2024",
 504      "name": "Cohere: Command R (08-2024)",
 505      "cost_per_1m_in": 0.15,
 506      "cost_per_1m_out": 0.6,
 507      "cost_per_1m_in_cached": 0,
 508      "cost_per_1m_out_cached": 0,
 509      "context_window": 128000,
 510      "default_max_tokens": 2000,
 511      "can_reason": false,
 512      "supports_attachments": false
 513    },
 514    {
 515      "id": "cohere/command-r-plus-08-2024",
 516      "name": "Cohere: Command R+ (08-2024)",
 517      "cost_per_1m_in": 2.5,
 518      "cost_per_1m_out": 10,
 519      "cost_per_1m_in_cached": 0,
 520      "cost_per_1m_out_cached": 0,
 521      "context_window": 128000,
 522      "default_max_tokens": 2000,
 523      "can_reason": false,
 524      "supports_attachments": false
 525    },
 526    {
 527      "id": "deepseek/deepseek-chat",
 528      "name": "DeepSeek: DeepSeek V3",
 529      "cost_per_1m_in": 0.4,
 530      "cost_per_1m_out": 1.3,
 531      "cost_per_1m_in_cached": 0,
 532      "cost_per_1m_out_cached": 0,
 533      "context_window": 64000,
 534      "default_max_tokens": 8000,
 535      "can_reason": false,
 536      "supports_attachments": false
 537    },
 538    {
 539      "id": "deepseek/deepseek-chat-v3-0324",
 540      "name": "DeepSeek: DeepSeek V3 0324",
 541      "cost_per_1m_in": 0.25,
 542      "cost_per_1m_out": 1,
 543      "cost_per_1m_in_cached": 0,
 544      "cost_per_1m_out_cached": 0,
 545      "context_window": 163840,
 546      "default_max_tokens": 81920,
 547      "can_reason": false,
 548      "supports_attachments": false
 549    },
 550    {
 551      "id": "deepseek/deepseek-chat-v3.1",
 552      "name": "DeepSeek: DeepSeek V3.1",
 553      "cost_per_1m_in": 0.6,
 554      "cost_per_1m_out": 1.7,
 555      "cost_per_1m_in_cached": 0,
 556      "cost_per_1m_out_cached": 0,
 557      "context_window": 163840,
 558      "default_max_tokens": 16384,
 559      "can_reason": true,
 560      "reasoning_levels": [
 561        "low",
 562        "medium",
 563        "high"
 564      ],
 565      "default_reasoning_effort": "medium",
 566      "supports_attachments": false
 567    },
 568    {
 569      "id": "deepseek/deepseek-v3.1-terminus",
 570      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 571      "cost_per_1m_in": 0.21,
 572      "cost_per_1m_out": 0.79,
 573      "cost_per_1m_in_cached": 0,
 574      "cost_per_1m_out_cached": 0.13,
 575      "context_window": 163840,
 576      "default_max_tokens": 16384,
 577      "can_reason": true,
 578      "reasoning_levels": [
 579        "low",
 580        "medium",
 581        "high"
 582      ],
 583      "default_reasoning_effort": "medium",
 584      "supports_attachments": false
 585    },
 586    {
 587      "id": "deepseek/deepseek-v3.2",
 588      "name": "DeepSeek: DeepSeek V3.2",
 589      "cost_per_1m_in": 0.26,
 590      "cost_per_1m_out": 0.38,
 591      "cost_per_1m_in_cached": 0,
 592      "cost_per_1m_out_cached": 0.13,
 593      "context_window": 163840,
 594      "default_max_tokens": 16384,
 595      "can_reason": true,
 596      "reasoning_levels": [
 597        "low",
 598        "medium",
 599        "high"
 600      ],
 601      "default_reasoning_effort": "medium",
 602      "supports_attachments": false
 603    },
 604    {
 605      "id": "deepseek/deepseek-v3.2-exp",
 606      "name": "DeepSeek: DeepSeek V3.2 Exp",
 607      "cost_per_1m_in": 0.27,
 608      "cost_per_1m_out": 0.41,
 609      "cost_per_1m_in_cached": 0,
 610      "cost_per_1m_out_cached": 0,
 611      "context_window": 163840,
 612      "default_max_tokens": 32768,
 613      "can_reason": true,
 614      "reasoning_levels": [
 615        "low",
 616        "medium",
 617        "high"
 618      ],
 619      "default_reasoning_effort": "medium",
 620      "supports_attachments": false
 621    },
 622    {
 623      "id": "deepseek/deepseek-v4-flash",
 624      "name": "DeepSeek: DeepSeek V4 Flash",
 625      "cost_per_1m_in": 0.14,
 626      "cost_per_1m_out": 0.28,
 627      "cost_per_1m_in_cached": 0,
 628      "cost_per_1m_out_cached": 0.028,
 629      "context_window": 1048576,
 630      "default_max_tokens": 192000,
 631      "can_reason": true,
 632      "reasoning_levels": [
 633        "low",
 634        "medium",
 635        "high"
 636      ],
 637      "default_reasoning_effort": "medium",
 638      "supports_attachments": false
 639    },
 640    {
 641      "id": "deepseek/deepseek-v4-pro",
 642      "name": "DeepSeek: DeepSeek V4 Pro",
 643      "cost_per_1m_in": 1.74,
 644      "cost_per_1m_out": 3.48,
 645      "cost_per_1m_in_cached": 0,
 646      "cost_per_1m_out_cached": 0.145,
 647      "context_window": 1048576,
 648      "default_max_tokens": 192000,
 649      "can_reason": true,
 650      "reasoning_levels": [
 651        "low",
 652        "medium",
 653        "high"
 654      ],
 655      "default_reasoning_effort": "medium",
 656      "supports_attachments": false
 657    },
 658    {
 659      "id": "deepseek/deepseek-r1",
 660      "name": "DeepSeek: R1",
 661      "cost_per_1m_in": 0.7,
 662      "cost_per_1m_out": 2.5,
 663      "cost_per_1m_in_cached": 0,
 664      "cost_per_1m_out_cached": 0,
 665      "context_window": 64000,
 666      "default_max_tokens": 8000,
 667      "can_reason": true,
 668      "reasoning_levels": [
 669        "low",
 670        "medium",
 671        "high"
 672      ],
 673      "default_reasoning_effort": "medium",
 674      "supports_attachments": false
 675    },
 676    {
 677      "id": "deepseek/deepseek-r1-0528",
 678      "name": "DeepSeek: R1 0528",
 679      "cost_per_1m_in": 0.5,
 680      "cost_per_1m_out": 2.18,
 681      "cost_per_1m_in_cached": 0,
 682      "cost_per_1m_out_cached": 0,
 683      "context_window": 163840,
 684      "default_max_tokens": 81920,
 685      "can_reason": true,
 686      "reasoning_levels": [
 687        "low",
 688        "medium",
 689        "high"
 690      ],
 691      "default_reasoning_effort": "medium",
 692      "supports_attachments": false
 693    },
 694    {
 695      "id": "essentialai/rnj-1-instruct",
 696      "name": "EssentialAI: Rnj 1 Instruct",
 697      "cost_per_1m_in": 0.15,
 698      "cost_per_1m_out": 0.15,
 699      "cost_per_1m_in_cached": 0,
 700      "cost_per_1m_out_cached": 0,
 701      "context_window": 32768,
 702      "default_max_tokens": 3276,
 703      "can_reason": false,
 704      "supports_attachments": false
 705    },
 706    {
 707      "id": "google/gemini-2.0-flash-001",
 708      "name": "Google: Gemini 2.0 Flash",
 709      "cost_per_1m_in": 0.1,
 710      "cost_per_1m_out": 0.4,
 711      "cost_per_1m_in_cached": 0.08333,
 712      "cost_per_1m_out_cached": 0.025,
 713      "context_window": 1048576,
 714      "default_max_tokens": 4096,
 715      "can_reason": false,
 716      "supports_attachments": true
 717    },
 718    {
 719      "id": "google/gemini-2.0-flash-lite-001",
 720      "name": "Google: Gemini 2.0 Flash Lite",
 721      "cost_per_1m_in": 0.075,
 722      "cost_per_1m_out": 0.3,
 723      "cost_per_1m_in_cached": 0,
 724      "cost_per_1m_out_cached": 0,
 725      "context_window": 1048576,
 726      "default_max_tokens": 4096,
 727      "can_reason": false,
 728      "supports_attachments": true
 729    },
 730    {
 731      "id": "google/gemini-2.5-flash",
 732      "name": "Google: Gemini 2.5 Flash",
 733      "cost_per_1m_in": 0.3,
 734      "cost_per_1m_out": 2.5,
 735      "cost_per_1m_in_cached": 0.08333,
 736      "cost_per_1m_out_cached": 0.03,
 737      "context_window": 1048576,
 738      "default_max_tokens": 32767,
 739      "can_reason": true,
 740      "reasoning_levels": [
 741        "low",
 742        "medium",
 743        "high"
 744      ],
 745      "default_reasoning_effort": "medium",
 746      "supports_attachments": true
 747    },
 748    {
 749      "id": "google/gemini-2.5-flash-lite",
 750      "name": "Google: Gemini 2.5 Flash Lite",
 751      "cost_per_1m_in": 0.1,
 752      "cost_per_1m_out": 0.4,
 753      "cost_per_1m_in_cached": 0.08333,
 754      "cost_per_1m_out_cached": 0.01,
 755      "context_window": 1048576,
 756      "default_max_tokens": 32767,
 757      "can_reason": true,
 758      "reasoning_levels": [
 759        "low",
 760        "medium",
 761        "high"
 762      ],
 763      "default_reasoning_effort": "medium",
 764      "supports_attachments": true
 765    },
 766    {
 767      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 768      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 769      "cost_per_1m_in": 0.1,
 770      "cost_per_1m_out": 0.4,
 771      "cost_per_1m_in_cached": 0.08333,
 772      "cost_per_1m_out_cached": 0.01,
 773      "context_window": 1048576,
 774      "default_max_tokens": 32767,
 775      "can_reason": true,
 776      "reasoning_levels": [
 777        "low",
 778        "medium",
 779        "high"
 780      ],
 781      "default_reasoning_effort": "medium",
 782      "supports_attachments": true
 783    },
 784    {
 785      "id": "google/gemini-2.5-pro",
 786      "name": "Google: Gemini 2.5 Pro",
 787      "cost_per_1m_in": 1.25,
 788      "cost_per_1m_out": 10,
 789      "cost_per_1m_in_cached": 0.375,
 790      "cost_per_1m_out_cached": 0.125,
 791      "context_window": 1048576,
 792      "default_max_tokens": 32768,
 793      "can_reason": true,
 794      "reasoning_levels": [
 795        "low",
 796        "medium",
 797        "high"
 798      ],
 799      "default_reasoning_effort": "medium",
 800      "supports_attachments": true
 801    },
 802    {
 803      "id": "google/gemini-2.5-pro-preview-05-06",
 804      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 805      "cost_per_1m_in": 1.25,
 806      "cost_per_1m_out": 10,
 807      "cost_per_1m_in_cached": 0.375,
 808      "cost_per_1m_out_cached": 0.125,
 809      "context_window": 1048576,
 810      "default_max_tokens": 32768,
 811      "can_reason": true,
 812      "reasoning_levels": [
 813        "low",
 814        "medium",
 815        "high"
 816      ],
 817      "default_reasoning_effort": "medium",
 818      "supports_attachments": true
 819    },
 820    {
 821      "id": "google/gemini-2.5-pro-preview",
 822      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 823      "cost_per_1m_in": 1.25,
 824      "cost_per_1m_out": 10,
 825      "cost_per_1m_in_cached": 0.375,
 826      "cost_per_1m_out_cached": 0.125,
 827      "context_window": 1048576,
 828      "default_max_tokens": 32768,
 829      "can_reason": true,
 830      "reasoning_levels": [
 831        "low",
 832        "medium",
 833        "high"
 834      ],
 835      "default_reasoning_effort": "medium",
 836      "supports_attachments": true
 837    },
 838    {
 839      "id": "google/gemini-3-flash-preview",
 840      "name": "Google: Gemini 3 Flash Preview",
 841      "cost_per_1m_in": 0.5,
 842      "cost_per_1m_out": 3,
 843      "cost_per_1m_in_cached": 0.08333,
 844      "cost_per_1m_out_cached": 0.05,
 845      "context_window": 1048576,
 846      "default_max_tokens": 32768,
 847      "can_reason": true,
 848      "reasoning_levels": [
 849        "low",
 850        "medium",
 851        "high"
 852      ],
 853      "default_reasoning_effort": "medium",
 854      "supports_attachments": true
 855    },
 856    {
 857      "id": "google/gemini-3.1-flash-lite-preview",
 858      "name": "Google: Gemini 3.1 Flash Lite Preview",
 859      "cost_per_1m_in": 0.25,
 860      "cost_per_1m_out": 1.5,
 861      "cost_per_1m_in_cached": 0.08333,
 862      "cost_per_1m_out_cached": 0.025,
 863      "context_window": 1048576,
 864      "default_max_tokens": 32768,
 865      "can_reason": true,
 866      "reasoning_levels": [
 867        "low",
 868        "medium",
 869        "high"
 870      ],
 871      "default_reasoning_effort": "medium",
 872      "supports_attachments": true
 873    },
 874    {
 875      "id": "google/gemini-3.1-pro-preview",
 876      "name": "Google: Gemini 3.1 Pro Preview",
 877      "cost_per_1m_in": 2,
 878      "cost_per_1m_out": 12,
 879      "cost_per_1m_in_cached": 0.375,
 880      "cost_per_1m_out_cached": 0.2,
 881      "context_window": 1048576,
 882      "default_max_tokens": 32768,
 883      "can_reason": true,
 884      "reasoning_levels": [
 885        "low",
 886        "medium",
 887        "high"
 888      ],
 889      "default_reasoning_effort": "medium",
 890      "supports_attachments": true
 891    },
 892    {
 893      "id": "google/gemini-3.1-pro-preview-customtools",
 894      "name": "Google: Gemini 3.1 Pro Preview Custom Tools",
 895      "cost_per_1m_in": 2,
 896      "cost_per_1m_out": 12,
 897      "cost_per_1m_in_cached": 0.375,
 898      "cost_per_1m_out_cached": 0.2,
 899      "context_window": 1048576,
 900      "default_max_tokens": 32768,
 901      "can_reason": true,
 902      "reasoning_levels": [
 903        "low",
 904        "medium",
 905        "high"
 906      ],
 907      "default_reasoning_effort": "medium",
 908      "supports_attachments": true
 909    },
 910    {
 911      "id": "google/gemma-4-26b-a4b-it",
  912      "name": "Google: Gemma 4 26B A4B",
 913      "cost_per_1m_in": 0.09,
 914      "cost_per_1m_out": 0.4,
 915      "cost_per_1m_in_cached": 0,
 916      "cost_per_1m_out_cached": 0.06,
 917      "context_window": 262144,
 918      "default_max_tokens": 131072,
 919      "can_reason": true,
 920      "reasoning_levels": [
 921        "low",
 922        "medium",
 923        "high"
 924      ],
 925      "default_reasoning_effort": "medium",
 926      "supports_attachments": true
 927    },
 928    {
 929      "id": "google/gemma-4-26b-a4b-it:free",
  930      "name": "Google: Gemma 4 26B A4B (free)",
 931      "cost_per_1m_in": 0,
 932      "cost_per_1m_out": 0,
 933      "cost_per_1m_in_cached": 0,
 934      "cost_per_1m_out_cached": 0,
 935      "context_window": 262144,
 936      "default_max_tokens": 16384,
 937      "can_reason": true,
 938      "reasoning_levels": [
 939        "low",
 940        "medium",
 941        "high"
 942      ],
 943      "default_reasoning_effort": "medium",
 944      "supports_attachments": true
 945    },
 946    {
 947      "id": "google/gemma-4-31b-it",
 948      "name": "Google: Gemma 4 31B",
 949      "cost_per_1m_in": 0.13,
 950      "cost_per_1m_out": 0.38,
 951      "cost_per_1m_in_cached": 0,
 952      "cost_per_1m_out_cached": 0,
 953      "context_window": 262144,
 954      "default_max_tokens": 26214,
 955      "can_reason": true,
 956      "reasoning_levels": [
 957        "low",
 958        "medium",
 959        "high"
 960      ],
 961      "default_reasoning_effort": "medium",
 962      "supports_attachments": true
 963    },
 964    {
 965      "id": "google/gemma-4-31b-it:free",
 966      "name": "Google: Gemma 4 31B (free)",
 967      "cost_per_1m_in": 0,
 968      "cost_per_1m_out": 0,
 969      "cost_per_1m_in_cached": 0,
 970      "cost_per_1m_out_cached": 0,
 971      "context_window": 262144,
 972      "default_max_tokens": 16384,
 973      "can_reason": true,
 974      "reasoning_levels": [
 975        "low",
 976        "medium",
 977        "high"
 978      ],
 979      "default_reasoning_effort": "medium",
 980      "supports_attachments": true
 981    },
 982    {
 983      "id": "inception/mercury-2",
 984      "name": "Inception: Mercury 2",
 985      "cost_per_1m_in": 0.25,
 986      "cost_per_1m_out": 0.75,
 987      "cost_per_1m_in_cached": 0,
 988      "cost_per_1m_out_cached": 0.025,
 989      "context_window": 128000,
 990      "default_max_tokens": 25000,
 991      "can_reason": true,
 992      "reasoning_levels": [
 993        "low",
 994        "medium",
 995        "high"
 996      ],
 997      "default_reasoning_effort": "medium",
 998      "supports_attachments": false
 999    },
1000    {
1001      "id": "kwaipilot/kat-coder-pro-v2",
1002      "name": "Kwaipilot: KAT-Coder-Pro V2",
1003      "cost_per_1m_in": 0.3,
1004      "cost_per_1m_out": 1.2,
1005      "cost_per_1m_in_cached": 0,
1006      "cost_per_1m_out_cached": 0.06,
1007      "context_window": 262144,
1008      "default_max_tokens": 72000,
1009      "can_reason": false,
1010      "supports_attachments": false
1011    },
1012    {
1013      "id": "meta-llama/llama-3.1-70b-instruct",
1014      "name": "Meta: Llama 3.1 70B Instruct",
1015      "cost_per_1m_in": 0.4,
1016      "cost_per_1m_out": 0.4,
1017      "cost_per_1m_in_cached": 0,
1018      "cost_per_1m_out_cached": 0,
1019      "context_window": 131072,
1020      "default_max_tokens": 8192,
1021      "can_reason": false,
1022      "supports_attachments": false
1023    },
1024    {
1025      "id": "meta-llama/llama-3.3-70b-instruct",
1026      "name": "Meta: Llama 3.3 70B Instruct",
1027      "cost_per_1m_in": 0.59,
1028      "cost_per_1m_out": 0.79,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0,
1031      "context_window": 131072,
1032      "default_max_tokens": 16384,
1033      "can_reason": false,
1034      "supports_attachments": false
1035    },
1036    {
1037      "id": "meta-llama/llama-3.3-70b-instruct:free",
1038      "name": "Meta: Llama 3.3 70B Instruct (free)",
1039      "cost_per_1m_in": 0,
1040      "cost_per_1m_out": 0,
1041      "cost_per_1m_in_cached": 0,
1042      "cost_per_1m_out_cached": 0,
1043      "context_window": 65536,
1044      "default_max_tokens": 6553,
1045      "can_reason": false,
1046      "supports_attachments": false
1047    },
1048    {
1049      "id": "meta-llama/llama-4-scout",
1050      "name": "Meta: Llama 4 Scout",
1051      "cost_per_1m_in": 0.25,
1052      "cost_per_1m_out": 0.7,
1053      "cost_per_1m_in_cached": 0,
1054      "cost_per_1m_out_cached": 0,
1055      "context_window": 1310720,
1056      "default_max_tokens": 4096,
1057      "can_reason": false,
1058      "supports_attachments": true
1059    },
1060    {
1061      "id": "minimax/minimax-m2",
1062      "name": "MiniMax: MiniMax M2",
1063      "cost_per_1m_in": 0.255,
1064      "cost_per_1m_out": 1,
1065      "cost_per_1m_in_cached": 0,
1066      "cost_per_1m_out_cached": 0.03,
1067      "context_window": 196608,
1068      "default_max_tokens": 98304,
1069      "can_reason": true,
1070      "reasoning_levels": [
1071        "low",
1072        "medium",
1073        "high"
1074      ],
1075      "default_reasoning_effort": "medium",
1076      "supports_attachments": false
1077    },
1078    {
1079      "id": "minimax/minimax-m2.1",
1080      "name": "MiniMax: MiniMax M2.1",
1081      "cost_per_1m_in": 0.3,
1082      "cost_per_1m_out": 1.2,
1083      "cost_per_1m_in_cached": 0,
1084      "cost_per_1m_out_cached": 0.03,
1085      "context_window": 204800,
1086      "default_max_tokens": 65536,
1087      "can_reason": true,
1088      "reasoning_levels": [
1089        "low",
1090        "medium",
1091        "high"
1092      ],
1093      "default_reasoning_effort": "medium",
1094      "supports_attachments": false
1095    },
1096    {
1097      "id": "minimax/minimax-m2.5",
1098      "name": "MiniMax: MiniMax M2.5",
1099      "cost_per_1m_in": 0.3,
1100      "cost_per_1m_out": 1.2,
1101      "cost_per_1m_in_cached": 0,
1102      "cost_per_1m_out_cached": 0.03,
1103      "context_window": 204800,
1104      "default_max_tokens": 65536,
1105      "can_reason": true,
1106      "reasoning_levels": [
1107        "low",
1108        "medium",
1109        "high"
1110      ],
1111      "default_reasoning_effort": "medium",
1112      "supports_attachments": false
1113    },
1114    {
1115      "id": "minimax/minimax-m2.5:free",
1116      "name": "MiniMax: MiniMax M2.5 (free)",
1117      "cost_per_1m_in": 0,
1118      "cost_per_1m_out": 0,
1119      "cost_per_1m_in_cached": 0,
1120      "cost_per_1m_out_cached": 0,
1121      "context_window": 196608,
1122      "default_max_tokens": 4096,
1123      "can_reason": true,
1124      "reasoning_levels": [
1125        "low",
1126        "medium",
1127        "high"
1128      ],
1129      "default_reasoning_effort": "medium",
1130      "supports_attachments": false
1131    },
1132    {
1133      "id": "minimax/minimax-m2.7",
1134      "name": "MiniMax: MiniMax M2.7",
1135      "cost_per_1m_in": 0.3,
1136      "cost_per_1m_out": 1.2,
1137      "cost_per_1m_in_cached": 0,
1138      "cost_per_1m_out_cached": 0.06,
1139      "context_window": 204800,
1140      "default_max_tokens": 65536,
1141      "can_reason": true,
1142      "reasoning_levels": [
1143        "low",
1144        "medium",
1145        "high"
1146      ],
1147      "default_reasoning_effort": "medium",
1148      "supports_attachments": false
1149    },
1150    {
1151      "id": "mistralai/mistral-large",
1152      "name": "Mistral Large",
1153      "cost_per_1m_in": 2,
1154      "cost_per_1m_out": 6,
1155      "cost_per_1m_in_cached": 0,
1156      "cost_per_1m_out_cached": 0.2,
1157      "context_window": 128000,
1158      "default_max_tokens": 12800,
1159      "can_reason": false,
1160      "supports_attachments": false
1161    },
1162    {
1163      "id": "mistralai/mistral-large-2407",
1164      "name": "Mistral Large 2407",
1165      "cost_per_1m_in": 2,
1166      "cost_per_1m_out": 6,
1167      "cost_per_1m_in_cached": 0,
1168      "cost_per_1m_out_cached": 0.2,
1169      "context_window": 131072,
1170      "default_max_tokens": 13107,
1171      "can_reason": false,
1172      "supports_attachments": false
1173    },
1174    {
1175      "id": "mistralai/mistral-large-2411",
1176      "name": "Mistral Large 2411",
1177      "cost_per_1m_in": 2,
1178      "cost_per_1m_out": 6,
1179      "cost_per_1m_in_cached": 0,
1180      "cost_per_1m_out_cached": 0.2,
1181      "context_window": 131072,
1182      "default_max_tokens": 13107,
1183      "can_reason": false,
1184      "supports_attachments": false
1185    },
1186    {
1187      "id": "mistralai/codestral-2508",
1188      "name": "Mistral: Codestral 2508",
1189      "cost_per_1m_in": 0.3,
1190      "cost_per_1m_out": 0.9,
1191      "cost_per_1m_in_cached": 0,
1192      "cost_per_1m_out_cached": 0.03,
1193      "context_window": 256000,
1194      "default_max_tokens": 25600,
1195      "can_reason": false,
1196      "supports_attachments": false
1197    },
1198    {
1199      "id": "mistralai/devstral-2512",
1200      "name": "Mistral: Devstral 2 2512",
1201      "cost_per_1m_in": 0.4,
1202      "cost_per_1m_out": 2,
1203      "cost_per_1m_in_cached": 0,
1204      "cost_per_1m_out_cached": 0.04,
1205      "context_window": 262144,
1206      "default_max_tokens": 26214,
1207      "can_reason": false,
1208      "supports_attachments": false
1209    },
1210    {
1211      "id": "mistralai/devstral-medium",
1212      "name": "Mistral: Devstral Medium",
1213      "cost_per_1m_in": 0.4,
1214      "cost_per_1m_out": 2,
1215      "cost_per_1m_in_cached": 0,
1216      "cost_per_1m_out_cached": 0.04,
1217      "context_window": 131072,
1218      "default_max_tokens": 13107,
1219      "can_reason": false,
1220      "supports_attachments": false
1221    },
1222    {
1223      "id": "mistralai/devstral-small",
1224      "name": "Mistral: Devstral Small 1.1",
1225      "cost_per_1m_in": 0.1,
1226      "cost_per_1m_out": 0.3,
1227      "cost_per_1m_in_cached": 0,
1228      "cost_per_1m_out_cached": 0.01,
1229      "context_window": 131072,
1230      "default_max_tokens": 13107,
1231      "can_reason": false,
1232      "supports_attachments": false
1233    },
1234    {
1235      "id": "mistralai/ministral-14b-2512",
1236      "name": "Mistral: Ministral 3 14B 2512",
1237      "cost_per_1m_in": 0.35,
1238      "cost_per_1m_out": 0.35,
1239      "cost_per_1m_in_cached": 0,
1240      "cost_per_1m_out_cached": 0,
1241      "context_window": 262144,
1242      "default_max_tokens": 131072,
1243      "can_reason": false,
1244      "supports_attachments": true
1245    },
1246    {
1247      "id": "mistralai/ministral-3b-2512",
1248      "name": "Mistral: Ministral 3 3B 2512",
1249      "cost_per_1m_in": 0.15,
1250      "cost_per_1m_out": 0.15,
1251      "cost_per_1m_in_cached": 0,
1252      "cost_per_1m_out_cached": 0,
1253      "context_window": 131072,
1254      "default_max_tokens": 65536,
1255      "can_reason": false,
1256      "supports_attachments": true
1257    },
1258    {
1259      "id": "mistralai/ministral-8b-2512",
1260      "name": "Mistral: Ministral 3 8B 2512",
1261      "cost_per_1m_in": 0.15,
1262      "cost_per_1m_out": 0.15,
1263      "cost_per_1m_in_cached": 0,
1264      "cost_per_1m_out_cached": 0.015,
1265      "context_window": 262144,
1266      "default_max_tokens": 26214,
1267      "can_reason": false,
1268      "supports_attachments": true
1269    },
1270    {
1271      "id": "mistralai/mistral-large-2512",
1272      "name": "Mistral: Mistral Large 3 2512",
1273      "cost_per_1m_in": 0.5,
1274      "cost_per_1m_out": 1.5,
1275      "cost_per_1m_in_cached": 0,
1276      "cost_per_1m_out_cached": 0.05,
1277      "context_window": 262144,
1278      "default_max_tokens": 26214,
1279      "can_reason": false,
1280      "supports_attachments": true
1281    },
1282    {
1283      "id": "mistralai/mistral-medium-3",
1284      "name": "Mistral: Mistral Medium 3",
1285      "cost_per_1m_in": 0.4,
1286      "cost_per_1m_out": 2,
1287      "cost_per_1m_in_cached": 0,
1288      "cost_per_1m_out_cached": 0.04,
1289      "context_window": 131072,
1290      "default_max_tokens": 13107,
1291      "can_reason": false,
1292      "supports_attachments": true
1293    },
1294    {
1295      "id": "mistralai/mistral-medium-3.1",
1296      "name": "Mistral: Mistral Medium 3.1",
1297      "cost_per_1m_in": 0.4,
1298      "cost_per_1m_out": 2,
1299      "cost_per_1m_in_cached": 0,
1300      "cost_per_1m_out_cached": 0.04,
1301      "context_window": 131072,
1302      "default_max_tokens": 13107,
1303      "can_reason": false,
1304      "supports_attachments": true
1305    },
1306    {
1307      "id": "mistralai/mistral-nemo",
1308      "name": "Mistral: Mistral Nemo",
1309      "cost_per_1m_in": 0.15,
1310      "cost_per_1m_out": 0.15,
1311      "cost_per_1m_in_cached": 0,
1312      "cost_per_1m_out_cached": 0.015,
1313      "context_window": 131072,
1314      "default_max_tokens": 13107,
1315      "can_reason": false,
1316      "supports_attachments": false
1317    },
1318    {
1319      "id": "mistralai/mistral-small-3.2-24b-instruct",
1320      "name": "Mistral: Mistral Small 3.2 24B",
1321      "cost_per_1m_in": 0.09375,
1322      "cost_per_1m_out": 0.25,
1323      "cost_per_1m_in_cached": 0,
1324      "cost_per_1m_out_cached": 0,
1325      "context_window": 256000,
1326      "default_max_tokens": 8192,
1327      "can_reason": false,
1328      "supports_attachments": true
1329    },
1330    {
1331      "id": "mistralai/mistral-small-2603",
1332      "name": "Mistral: Mistral Small 4",
1333      "cost_per_1m_in": 0.15,
1334      "cost_per_1m_out": 0.6,
1335      "cost_per_1m_in_cached": 0,
1336      "cost_per_1m_out_cached": 0.015,
1337      "context_window": 262144,
1338      "default_max_tokens": 26214,
1339      "can_reason": true,
1340      "reasoning_levels": [
1341        "low",
1342        "medium",
1343        "high"
1344      ],
1345      "default_reasoning_effort": "medium",
1346      "supports_attachments": true
1347    },
1348    {
1349      "id": "mistralai/mistral-small-creative",
1350      "name": "Mistral: Mistral Small Creative",
1351      "cost_per_1m_in": 0.1,
1352      "cost_per_1m_out": 0.3,
1353      "cost_per_1m_in_cached": 0,
1354      "cost_per_1m_out_cached": 0.01,
1355      "context_window": 32768,
1356      "default_max_tokens": 3276,
1357      "can_reason": false,
1358      "supports_attachments": false
1359    },
1360    {
1361      "id": "mistralai/mixtral-8x22b-instruct",
1362      "name": "Mistral: Mixtral 8x22B Instruct",
1363      "cost_per_1m_in": 2,
1364      "cost_per_1m_out": 6,
1365      "cost_per_1m_in_cached": 0,
1366      "cost_per_1m_out_cached": 0.2,
1367      "context_window": 65536,
1368      "default_max_tokens": 6553,
1369      "can_reason": false,
1370      "supports_attachments": false
1371    },
1372    {
1373      "id": "mistralai/mixtral-8x7b-instruct",
1374      "name": "Mistral: Mixtral 8x7B Instruct",
1375      "cost_per_1m_in": 0.54,
1376      "cost_per_1m_out": 0.54,
1377      "cost_per_1m_in_cached": 0,
1378      "cost_per_1m_out_cached": 0,
1379      "context_window": 32768,
1380      "default_max_tokens": 8192,
1381      "can_reason": false,
1382      "supports_attachments": false
1383    },
1384    {
1385      "id": "mistralai/pixtral-large-2411",
1386      "name": "Mistral: Pixtral Large 2411",
1387      "cost_per_1m_in": 2,
1388      "cost_per_1m_out": 6,
1389      "cost_per_1m_in_cached": 0,
1390      "cost_per_1m_out_cached": 0.2,
1391      "context_window": 131072,
1392      "default_max_tokens": 13107,
1393      "can_reason": false,
1394      "supports_attachments": true
1395    },
1396    {
1397      "id": "mistralai/mistral-saba",
1398      "name": "Mistral: Saba",
1399      "cost_per_1m_in": 0.2,
1400      "cost_per_1m_out": 0.6,
1401      "cost_per_1m_in_cached": 0,
1402      "cost_per_1m_out_cached": 0.02,
1403      "context_window": 32768,
1404      "default_max_tokens": 3276,
1405      "can_reason": false,
1406      "supports_attachments": false
1407    },
1408    {
1409      "id": "mistralai/voxtral-small-24b-2507",
1410      "name": "Mistral: Voxtral Small 24B 2507",
1411      "cost_per_1m_in": 0.1,
1412      "cost_per_1m_out": 0.3,
1413      "cost_per_1m_in_cached": 0,
1414      "cost_per_1m_out_cached": 0.01,
1415      "context_window": 32000,
1416      "default_max_tokens": 3200,
1417      "can_reason": false,
1418      "supports_attachments": false
1419    },
1420    {
1421      "id": "moonshotai/kimi-k2",
1422      "name": "MoonshotAI: Kimi K2 0711",
1423      "cost_per_1m_in": 0.57,
1424      "cost_per_1m_out": 2.3,
1425      "cost_per_1m_in_cached": 0,
1426      "cost_per_1m_out_cached": 0,
1427      "context_window": 131072,
1428      "default_max_tokens": 16384,
1429      "can_reason": false,
1430      "supports_attachments": false
1431    },
1432    {
1433      "id": "moonshotai/kimi-k2-0905",
1434      "name": "MoonshotAI: Kimi K2 0905",
1435      "cost_per_1m_in": 0.6,
1436      "cost_per_1m_out": 2.5,
1437      "cost_per_1m_in_cached": 0,
1438      "cost_per_1m_out_cached": 0.6,
1439      "context_window": 262144,
1440      "default_max_tokens": 4096,
1441      "can_reason": false,
1442      "supports_attachments": false
1443    },
1444    {
1445      "id": "moonshotai/kimi-k2-thinking",
1446      "name": "MoonshotAI: Kimi K2 Thinking",
1447      "cost_per_1m_in": 0.6,
1448      "cost_per_1m_out": 2.5,
1449      "cost_per_1m_in_cached": 0,
1450      "cost_per_1m_out_cached": 0.6,
1451      "context_window": 262144,
1452      "default_max_tokens": 131072,
1453      "can_reason": true,
1454      "reasoning_levels": [
1455        "low",
1456        "medium",
1457        "high"
1458      ],
1459      "default_reasoning_effort": "medium",
1460      "supports_attachments": false
1461    },
1462    {
1463      "id": "moonshotai/kimi-k2.5",
1464      "name": "MoonshotAI: Kimi K2.5",
1465      "cost_per_1m_in": 0.57,
1466      "cost_per_1m_out": 2.85,
1467      "cost_per_1m_in_cached": 0,
1468      "cost_per_1m_out_cached": 0.095,
1469      "context_window": 262144,
1470      "default_max_tokens": 131072,
1471      "can_reason": true,
1472      "reasoning_levels": [
1473        "low",
1474        "medium",
1475        "high"
1476      ],
1477      "default_reasoning_effort": "medium",
1478      "supports_attachments": true
1479    },
1480    {
1481      "id": "moonshotai/kimi-k2.6",
1482      "name": "MoonshotAI: Kimi K2.6",
1483      "cost_per_1m_in": 0.8,
1484      "cost_per_1m_out": 4,
1485      "cost_per_1m_in_cached": 0,
1486      "cost_per_1m_out_cached": 0.25,
1487      "context_window": 262144,
1488      "default_max_tokens": 131072,
1489      "can_reason": true,
1490      "reasoning_levels": [
1491        "low",
1492        "medium",
1493        "high"
1494      ],
1495      "default_reasoning_effort": "medium",
1496      "supports_attachments": true
1497    },
1498    {
1499      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1500      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1501      "cost_per_1m_in": 1.2,
1502      "cost_per_1m_out": 1.2,
1503      "cost_per_1m_in_cached": 0,
1504      "cost_per_1m_out_cached": 0,
1505      "context_window": 131072,
1506      "default_max_tokens": 8192,
1507      "can_reason": false,
1508      "supports_attachments": false
1509    },
1510    {
1511      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1512      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1513      "cost_per_1m_in": 0.1,
1514      "cost_per_1m_out": 0.4,
1515      "cost_per_1m_in_cached": 0,
1516      "cost_per_1m_out_cached": 0,
1517      "context_window": 131072,
1518      "default_max_tokens": 13107,
1519      "can_reason": true,
1520      "reasoning_levels": [
1521        "low",
1522        "medium",
1523        "high"
1524      ],
1525      "default_reasoning_effort": "medium",
1526      "supports_attachments": false
1527    },
1528    {
1529      "id": "nvidia/nemotron-3-nano-30b-a3b",
1530      "name": "NVIDIA: Nemotron 3 Nano 30B A3B",
1531      "cost_per_1m_in": 0.05,
1532      "cost_per_1m_out": 0.2,
1533      "cost_per_1m_in_cached": 0,
1534      "cost_per_1m_out_cached": 0,
1535      "context_window": 262144,
1536      "default_max_tokens": 26214,
1537      "can_reason": true,
1538      "reasoning_levels": [
1539        "low",
1540        "medium",
1541        "high"
1542      ],
1543      "default_reasoning_effort": "medium",
1544      "supports_attachments": false
1545    },
1546    {
1547      "id": "nvidia/nemotron-3-nano-30b-a3b:free",
1548      "name": "NVIDIA: Nemotron 3 Nano 30B A3B (free)",
1549      "cost_per_1m_in": 0,
1550      "cost_per_1m_out": 0,
1551      "cost_per_1m_in_cached": 0,
1552      "cost_per_1m_out_cached": 0,
1553      "context_window": 256000,
1554      "default_max_tokens": 25600,
1555      "can_reason": true,
1556      "reasoning_levels": [
1557        "low",
1558        "medium",
1559        "high"
1560      ],
1561      "default_reasoning_effort": "medium",
1562      "supports_attachments": false
1563    },
1564    {
1565      "id": "nvidia/nemotron-3-super-120b-a12b",
1566      "name": "NVIDIA: Nemotron 3 Super",
1567      "cost_per_1m_in": 0.1,
1568      "cost_per_1m_out": 0.5,
1569      "cost_per_1m_in_cached": 0,
1570      "cost_per_1m_out_cached": 0,
1571      "context_window": 262144,
1572      "default_max_tokens": 26214,
1573      "can_reason": true,
1574      "reasoning_levels": [
1575        "low",
1576        "medium",
1577        "high"
1578      ],
1579      "default_reasoning_effort": "medium",
1580      "supports_attachments": false
1581    },
1582    {
1583      "id": "nvidia/nemotron-3-super-120b-a12b:free",
1584      "name": "NVIDIA: Nemotron 3 Super (free)",
1585      "cost_per_1m_in": 0,
1586      "cost_per_1m_out": 0,
1587      "cost_per_1m_in_cached": 0,
1588      "cost_per_1m_out_cached": 0,
1589      "context_window": 262144,
1590      "default_max_tokens": 131072,
1591      "can_reason": true,
1592      "reasoning_levels": [
1593        "low",
1594        "medium",
1595        "high"
1596      ],
1597      "default_reasoning_effort": "medium",
1598      "supports_attachments": false
1599    },
1600    {
1601      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1602      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1603      "cost_per_1m_in": 0,
1604      "cost_per_1m_out": 0,
1605      "cost_per_1m_in_cached": 0,
1606      "cost_per_1m_out_cached": 0,
1607      "context_window": 128000,
1608      "default_max_tokens": 64000,
1609      "can_reason": true,
1610      "reasoning_levels": [
1611        "low",
1612        "medium",
1613        "high"
1614      ],
1615      "default_reasoning_effort": "medium",
1616      "supports_attachments": true
1617    },
1618    {
1619      "id": "nvidia/nemotron-nano-9b-v2",
1620      "name": "NVIDIA: Nemotron Nano 9B V2",
1621      "cost_per_1m_in": 0.04,
1622      "cost_per_1m_out": 0.16,
1623      "cost_per_1m_in_cached": 0,
1624      "cost_per_1m_out_cached": 0,
1625      "context_window": 131072,
1626      "default_max_tokens": 13107,
1627      "can_reason": true,
1628      "reasoning_levels": [
1629        "low",
1630        "medium",
1631        "high"
1632      ],
1633      "default_reasoning_effort": "medium",
1634      "supports_attachments": false
1635    },
1636    {
1637      "id": "nvidia/nemotron-nano-9b-v2:free",
1638      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1639      "cost_per_1m_in": 0,
1640      "cost_per_1m_out": 0,
1641      "cost_per_1m_in_cached": 0,
1642      "cost_per_1m_out_cached": 0,
1643      "context_window": 128000,
1644      "default_max_tokens": 12800,
1645      "can_reason": true,
1646      "reasoning_levels": [
1647        "low",
1648        "medium",
1649        "high"
1650      ],
1651      "default_reasoning_effort": "medium",
1652      "supports_attachments": false
1653    },
1654    {
1655      "id": "nex-agi/deepseek-v3.1-nex-n1",
1656      "name": "Nex AGI: DeepSeek V3.1 Nex N1",
1657      "cost_per_1m_in": 0.135,
1658      "cost_per_1m_out": 0.5,
1659      "cost_per_1m_in_cached": 0,
1660      "cost_per_1m_out_cached": 0,
1661      "context_window": 131072,
1662      "default_max_tokens": 81920,
1663      "can_reason": false,
1664      "supports_attachments": false
1665    },
1666    {
1667      "id": "openai/gpt-audio",
1668      "name": "OpenAI: GPT Audio",
1669      "cost_per_1m_in": 2.5,
1670      "cost_per_1m_out": 10,
1671      "cost_per_1m_in_cached": 0,
1672      "cost_per_1m_out_cached": 0,
1673      "context_window": 128000,
1674      "default_max_tokens": 8192,
1675      "can_reason": false,
1676      "supports_attachments": false
1677    },
1678    {
1679      "id": "openai/gpt-audio-mini",
1680      "name": "OpenAI: GPT Audio Mini",
1681      "cost_per_1m_in": 0.6,
1682      "cost_per_1m_out": 2.4,
1683      "cost_per_1m_in_cached": 0,
1684      "cost_per_1m_out_cached": 0,
1685      "context_window": 128000,
1686      "default_max_tokens": 8192,
1687      "can_reason": false,
1688      "supports_attachments": false
1689    },
1690    {
1691      "id": "openai/gpt-4-turbo",
1692      "name": "OpenAI: GPT-4 Turbo",
1693      "cost_per_1m_in": 10,
1694      "cost_per_1m_out": 30,
1695      "cost_per_1m_in_cached": 0,
1696      "cost_per_1m_out_cached": 0,
1697      "context_window": 128000,
1698      "default_max_tokens": 2048,
1699      "can_reason": false,
1700      "supports_attachments": true
1701    },
1702    {
1703      "id": "openai/gpt-4-1106-preview",
1704      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1705      "cost_per_1m_in": 10,
1706      "cost_per_1m_out": 30,
1707      "cost_per_1m_in_cached": 0,
1708      "cost_per_1m_out_cached": 0,
1709      "context_window": 128000,
1710      "default_max_tokens": 2048,
1711      "can_reason": false,
1712      "supports_attachments": false
1713    },
1714    {
1715      "id": "openai/gpt-4-turbo-preview",
1716      "name": "OpenAI: GPT-4 Turbo Preview",
1717      "cost_per_1m_in": 10,
1718      "cost_per_1m_out": 30,
1719      "cost_per_1m_in_cached": 0,
1720      "cost_per_1m_out_cached": 0,
1721      "context_window": 128000,
1722      "default_max_tokens": 2048,
1723      "can_reason": false,
1724      "supports_attachments": false
1725    },
1726    {
1727      "id": "openai/gpt-4.1",
1728      "name": "OpenAI: GPT-4.1",
1729      "cost_per_1m_in": 2,
1730      "cost_per_1m_out": 8,
1731      "cost_per_1m_in_cached": 0,
1732      "cost_per_1m_out_cached": 0.5,
1733      "context_window": 1047576,
1734      "default_max_tokens": 104757,
1735      "can_reason": false,
1736      "supports_attachments": true
1737    },
1738    {
1739      "id": "openai/gpt-4.1-mini",
1740      "name": "OpenAI: GPT-4.1 Mini",
1741      "cost_per_1m_in": 0.4,
1742      "cost_per_1m_out": 1.6,
1743      "cost_per_1m_in_cached": 0,
1744      "cost_per_1m_out_cached": 0.1,
1745      "context_window": 1047576,
1746      "default_max_tokens": 104757,
1747      "can_reason": false,
1748      "supports_attachments": true
1749    },
1750    {
1751      "id": "openai/gpt-4.1-nano",
1752      "name": "OpenAI: GPT-4.1 Nano",
1753      "cost_per_1m_in": 0.1,
1754      "cost_per_1m_out": 0.4,
1755      "cost_per_1m_in_cached": 0,
1756      "cost_per_1m_out_cached": 0.03,
1757      "context_window": 1047576,
1758      "default_max_tokens": 104757,
1759      "can_reason": false,
1760      "supports_attachments": true
1761    },
1762    {
1763      "id": "openai/gpt-4o",
1764      "name": "OpenAI: GPT-4o",
1765      "cost_per_1m_in": 2.5,
1766      "cost_per_1m_out": 10,
1767      "cost_per_1m_in_cached": 0,
1768      "cost_per_1m_out_cached": 0,
1769      "context_window": 128000,
1770      "default_max_tokens": 8192,
1771      "can_reason": false,
1772      "supports_attachments": true
1773    },
1774    {
1775      "id": "openai/gpt-4o-2024-05-13",
1776      "name": "OpenAI: GPT-4o (2024-05-13)",
1777      "cost_per_1m_in": 5,
1778      "cost_per_1m_out": 15,
1779      "cost_per_1m_in_cached": 0,
1780      "cost_per_1m_out_cached": 0,
1781      "context_window": 128000,
1782      "default_max_tokens": 2048,
1783      "can_reason": false,
1784      "supports_attachments": true
1785    },
1786    {
1787      "id": "openai/gpt-4o-2024-08-06",
1788      "name": "OpenAI: GPT-4o (2024-08-06)",
1789      "cost_per_1m_in": 2.5,
1790      "cost_per_1m_out": 10,
1791      "cost_per_1m_in_cached": 0,
1792      "cost_per_1m_out_cached": 1.25,
1793      "context_window": 128000,
1794      "default_max_tokens": 8192,
1795      "can_reason": false,
1796      "supports_attachments": true
1797    },
1798    {
1799      "id": "openai/gpt-4o-2024-11-20",
1800      "name": "OpenAI: GPT-4o (2024-11-20)",
1801      "cost_per_1m_in": 2.5,
1802      "cost_per_1m_out": 10,
1803      "cost_per_1m_in_cached": 0,
1804      "cost_per_1m_out_cached": 1.25,
1805      "context_window": 128000,
1806      "default_max_tokens": 8192,
1807      "can_reason": false,
1808      "supports_attachments": true
1809    },
1810    {
1811      "id": "openai/gpt-4o-audio-preview",
1812      "name": "OpenAI: GPT-4o Audio",
1813      "cost_per_1m_in": 2.5,
1814      "cost_per_1m_out": 10,
1815      "cost_per_1m_in_cached": 0,
1816      "cost_per_1m_out_cached": 0,
1817      "context_window": 128000,
1818      "default_max_tokens": 8192,
1819      "can_reason": false,
1820      "supports_attachments": false
1821    },
1822    {
1823      "id": "openai/gpt-4o-mini",
1824      "name": "OpenAI: GPT-4o-mini",
1825      "cost_per_1m_in": 0.15,
1826      "cost_per_1m_out": 0.6,
1827      "cost_per_1m_in_cached": 0,
1828      "cost_per_1m_out_cached": 0.075,
1829      "context_window": 128000,
1830      "default_max_tokens": 8192,
1831      "can_reason": false,
1832      "supports_attachments": true
1833    },
1834    {
1835      "id": "openai/gpt-4o-mini-2024-07-18",
1836      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1837      "cost_per_1m_in": 0.15,
1838      "cost_per_1m_out": 0.6,
1839      "cost_per_1m_in_cached": 0,
1840      "cost_per_1m_out_cached": 0.075,
1841      "context_window": 128000,
1842      "default_max_tokens": 8192,
1843      "can_reason": false,
1844      "supports_attachments": true
1845    },
1846    {
1847      "id": "openai/gpt-5",
1848      "name": "OpenAI: GPT-5",
1849      "cost_per_1m_in": 1.25,
1850      "cost_per_1m_out": 10,
1851      "cost_per_1m_in_cached": 0,
1852      "cost_per_1m_out_cached": 0.125,
1853      "context_window": 400000,
1854      "default_max_tokens": 64000,
1855      "can_reason": true,
1856      "reasoning_levels": [
1857        "low",
1858        "medium",
1859        "high"
1860      ],
1861      "default_reasoning_effort": "medium",
1862      "supports_attachments": true
1863    },
1864    {
1865      "id": "openai/gpt-5-codex",
1866      "name": "OpenAI: GPT-5 Codex",
1867      "cost_per_1m_in": 1.25,
1868      "cost_per_1m_out": 10,
1869      "cost_per_1m_in_cached": 0,
1870      "cost_per_1m_out_cached": 0.125,
1871      "context_window": 400000,
1872      "default_max_tokens": 64000,
1873      "can_reason": true,
1874      "reasoning_levels": [
1875        "low",
1876        "medium",
1877        "high"
1878      ],
1879      "default_reasoning_effort": "medium",
1880      "supports_attachments": true
1881    },
1882    {
1883      "id": "openai/gpt-5-mini",
1884      "name": "OpenAI: GPT-5 Mini",
1885      "cost_per_1m_in": 0.25,
1886      "cost_per_1m_out": 2,
1887      "cost_per_1m_in_cached": 0,
1888      "cost_per_1m_out_cached": 0.03,
1889      "context_window": 400000,
1890      "default_max_tokens": 40000,
1891      "can_reason": true,
1892      "reasoning_levels": [
1893        "low",
1894        "medium",
1895        "high"
1896      ],
1897      "default_reasoning_effort": "medium",
1898      "supports_attachments": true
1899    },
1900    {
1901      "id": "openai/gpt-5-nano",
1902      "name": "OpenAI: GPT-5 Nano",
1903      "cost_per_1m_in": 0.05,
1904      "cost_per_1m_out": 0.4,
1905      "cost_per_1m_in_cached": 0,
1906      "cost_per_1m_out_cached": 0.005,
1907      "context_window": 400000,
1908      "default_max_tokens": 64000,
1909      "can_reason": true,
1910      "reasoning_levels": [
1911        "low",
1912        "medium",
1913        "high"
1914      ],
1915      "default_reasoning_effort": "medium",
1916      "supports_attachments": true
1917    },
1918    {
1919      "id": "openai/gpt-5-pro",
1920      "name": "OpenAI: GPT-5 Pro",
1921      "cost_per_1m_in": 15,
1922      "cost_per_1m_out": 120,
1923      "cost_per_1m_in_cached": 0,
1924      "cost_per_1m_out_cached": 0,
1925      "context_window": 400000,
1926      "default_max_tokens": 64000,
1927      "can_reason": true,
1928      "reasoning_levels": [
1929        "low",
1930        "medium",
1931        "high"
1932      ],
1933      "default_reasoning_effort": "medium",
1934      "supports_attachments": true
1935    },
1936    {
1937      "id": "openai/gpt-5.1",
1938      "name": "OpenAI: GPT-5.1",
1939      "cost_per_1m_in": 1.25,
1940      "cost_per_1m_out": 10,
1941      "cost_per_1m_in_cached": 0,
1942      "cost_per_1m_out_cached": 0.13,
1943      "context_window": 400000,
1944      "default_max_tokens": 64000,
1945      "can_reason": true,
1946      "reasoning_levels": [
1947        "low",
1948        "medium",
1949        "high"
1950      ],
1951      "default_reasoning_effort": "medium",
1952      "supports_attachments": true
1953    },
1954    {
1955      "id": "openai/gpt-5.1-chat",
1956      "name": "OpenAI: GPT-5.1 Chat",
1957      "cost_per_1m_in": 1.25,
1958      "cost_per_1m_out": 10,
1959      "cost_per_1m_in_cached": 0,
1960      "cost_per_1m_out_cached": 0.125,
1961      "context_window": 128000,
1962      "default_max_tokens": 8192,
1963      "can_reason": false,
1964      "supports_attachments": true
1965    },
1966    {
1967      "id": "openai/gpt-5.1-codex",
1968      "name": "OpenAI: GPT-5.1-Codex",
1969      "cost_per_1m_in": 1.25,
1970      "cost_per_1m_out": 10,
1971      "cost_per_1m_in_cached": 0,
1972      "cost_per_1m_out_cached": 0.125,
1973      "context_window": 400000,
1974      "default_max_tokens": 64000,
1975      "can_reason": true,
1976      "reasoning_levels": [
1977        "low",
1978        "medium",
1979        "high"
1980      ],
1981      "default_reasoning_effort": "medium",
1982      "supports_attachments": true
1983    },
1984    {
1985      "id": "openai/gpt-5.1-codex-max",
1986      "name": "OpenAI: GPT-5.1-Codex-Max",
1987      "cost_per_1m_in": 1.25,
1988      "cost_per_1m_out": 10,
1989      "cost_per_1m_in_cached": 0,
1990      "cost_per_1m_out_cached": 0.125,
1991      "context_window": 400000,
1992      "default_max_tokens": 64000,
1993      "can_reason": true,
1994      "reasoning_levels": [
1995        "low",
1996        "medium",
1997        "high"
1998      ],
1999      "default_reasoning_effort": "medium",
2000      "supports_attachments": true
2001    },
2002    {
2003      "id": "openai/gpt-5.1-codex-mini",
2004      "name": "OpenAI: GPT-5.1-Codex-Mini",
2005      "cost_per_1m_in": 0.25,
2006      "cost_per_1m_out": 2,
2007      "cost_per_1m_in_cached": 0,
2008      "cost_per_1m_out_cached": 0.025,
2009      "context_window": 400000,
2010      "default_max_tokens": 50000,
2011      "can_reason": true,
2012      "reasoning_levels": [
2013        "low",
2014        "medium",
2015        "high"
2016      ],
2017      "default_reasoning_effort": "medium",
2018      "supports_attachments": true
2019    },
2020    {
2021      "id": "openai/gpt-5.2",
2022      "name": "OpenAI: GPT-5.2",
2023      "cost_per_1m_in": 1.75,
2024      "cost_per_1m_out": 14,
2025      "cost_per_1m_in_cached": 0,
2026      "cost_per_1m_out_cached": 0.175,
2027      "context_window": 400000,
2028      "default_max_tokens": 64000,
2029      "can_reason": true,
2030      "reasoning_levels": [
2031        "low",
2032        "medium",
2033        "high"
2034      ],
2035      "default_reasoning_effort": "medium",
2036      "supports_attachments": true
2037    },
2038    {
2039      "id": "openai/gpt-5.2-chat",
2040      "name": "OpenAI: GPT-5.2 Chat",
2041      "cost_per_1m_in": 1.75,
2042      "cost_per_1m_out": 14,
2043      "cost_per_1m_in_cached": 0,
2044      "cost_per_1m_out_cached": 0.175,
2045      "context_window": 128000,
2046      "default_max_tokens": 8192,
2047      "can_reason": false,
2048      "supports_attachments": true
2049    },
2050    {
2051      "id": "openai/gpt-5.2-pro",
2052      "name": "OpenAI: GPT-5.2 Pro",
2053      "cost_per_1m_in": 21,
2054      "cost_per_1m_out": 168,
2055      "cost_per_1m_in_cached": 0,
2056      "cost_per_1m_out_cached": 0,
2057      "context_window": 400000,
2058      "default_max_tokens": 64000,
2059      "can_reason": true,
2060      "reasoning_levels": [
2061        "low",
2062        "medium",
2063        "high"
2064      ],
2065      "default_reasoning_effort": "medium",
2066      "supports_attachments": true
2067    },
2068    {
2069      "id": "openai/gpt-5.2-codex",
2070      "name": "OpenAI: GPT-5.2-Codex",
2071      "cost_per_1m_in": 1.75,
2072      "cost_per_1m_out": 14,
2073      "cost_per_1m_in_cached": 0,
2074      "cost_per_1m_out_cached": 0.175,
2075      "context_window": 400000,
2076      "default_max_tokens": 64000,
2077      "can_reason": true,
2078      "reasoning_levels": [
2079        "low",
2080        "medium",
2081        "high"
2082      ],
2083      "default_reasoning_effort": "medium",
2084      "supports_attachments": true
2085    },
2086    {
2087      "id": "openai/gpt-5.3-chat",
2088      "name": "OpenAI: GPT-5.3 Chat",
2089      "cost_per_1m_in": 1.75,
2090      "cost_per_1m_out": 14,
2091      "cost_per_1m_in_cached": 0,
2092      "cost_per_1m_out_cached": 0.175,
2093      "context_window": 128000,
2094      "default_max_tokens": 8192,
2095      "can_reason": false,
2096      "supports_attachments": true
2097    },
2098    {
2099      "id": "openai/gpt-5.3-codex",
2100      "name": "OpenAI: GPT-5.3-Codex",
2101      "cost_per_1m_in": 1.75,
2102      "cost_per_1m_out": 14,
2103      "cost_per_1m_in_cached": 0,
2104      "cost_per_1m_out_cached": 0.175,
2105      "context_window": 400000,
2106      "default_max_tokens": 64000,
2107      "can_reason": true,
2108      "reasoning_levels": [
2109        "low",
2110        "medium",
2111        "high"
2112      ],
2113      "default_reasoning_effort": "medium",
2114      "supports_attachments": true
2115    },
2116    {
2117      "id": "openai/gpt-5.4",
2118      "name": "OpenAI: GPT-5.4",
2119      "cost_per_1m_in": 2.5,
2120      "cost_per_1m_out": 15,
2121      "cost_per_1m_in_cached": 0,
2122      "cost_per_1m_out_cached": 0.25,
2123      "context_window": 1050000,
2124      "default_max_tokens": 64000,
2125      "can_reason": true,
2126      "reasoning_levels": [
2127        "low",
2128        "medium",
2129        "high"
2130      ],
2131      "default_reasoning_effort": "medium",
2132      "supports_attachments": true
2133    },
2134    {
2135      "id": "openai/gpt-5.4-mini",
2136      "name": "OpenAI: GPT-5.4 Mini",
2137      "cost_per_1m_in": 0.75,
2138      "cost_per_1m_out": 4.5,
2139      "cost_per_1m_in_cached": 0,
2140      "cost_per_1m_out_cached": 0.075,
2141      "context_window": 400000,
2142      "default_max_tokens": 64000,
2143      "can_reason": true,
2144      "reasoning_levels": [
2145        "low",
2146        "medium",
2147        "high"
2148      ],
2149      "default_reasoning_effort": "medium",
2150      "supports_attachments": true
2151    },
2152    {
2153      "id": "openai/gpt-5.4-nano",
2154      "name": "OpenAI: GPT-5.4 Nano",
2155      "cost_per_1m_in": 0.2,
2156      "cost_per_1m_out": 1.25,
2157      "cost_per_1m_in_cached": 0,
2158      "cost_per_1m_out_cached": 0.02,
2159      "context_window": 400000,
2160      "default_max_tokens": 64000,
2161      "can_reason": true,
2162      "reasoning_levels": [
2163        "low",
2164        "medium",
2165        "high"
2166      ],
2167      "default_reasoning_effort": "medium",
2168      "supports_attachments": true
2169    },
2170    {
2171      "id": "openai/gpt-5.4-pro",
2172      "name": "OpenAI: GPT-5.4 Pro",
2173      "cost_per_1m_in": 30,
2174      "cost_per_1m_out": 180,
2175      "cost_per_1m_in_cached": 0,
2176      "cost_per_1m_out_cached": 0,
2177      "context_window": 1050000,
2178      "default_max_tokens": 64000,
2179      "can_reason": true,
2180      "reasoning_levels": [
2181        "low",
2182        "medium",
2183        "high"
2184      ],
2185      "default_reasoning_effort": "medium",
2186      "supports_attachments": true
2187    },
2188    {
2189      "id": "openai/gpt-5.5",
2190      "name": "OpenAI: GPT-5.5",
2191      "cost_per_1m_in": 5,
2192      "cost_per_1m_out": 30,
2193      "cost_per_1m_in_cached": 0,
2194      "cost_per_1m_out_cached": 0.5,
2195      "context_window": 1050000,
2196      "default_max_tokens": 64000,
2197      "can_reason": true,
2198      "reasoning_levels": [
2199        "low",
2200        "medium",
2201        "high"
2202      ],
2203      "default_reasoning_effort": "medium",
2204      "supports_attachments": true
2205    },
2206    {
2207      "id": "openai/gpt-5.5-pro",
2208      "name": "OpenAI: GPT-5.5 Pro",
2209      "cost_per_1m_in": 30,
2210      "cost_per_1m_out": 180,
2211      "cost_per_1m_in_cached": 0,
2212      "cost_per_1m_out_cached": 0,
2213      "context_window": 1050000,
2214      "default_max_tokens": 64000,
2215      "can_reason": true,
2216      "reasoning_levels": [
2217        "low",
2218        "medium",
2219        "high"
2220      ],
2221      "default_reasoning_effort": "medium",
2222      "supports_attachments": true
2223    },
2224    {
2225      "id": "openai/gpt-oss-120b",
2226      "name": "OpenAI: gpt-oss-120b",
2227      "cost_per_1m_in": 0.1,
2228      "cost_per_1m_out": 0.49,
2229      "cost_per_1m_in_cached": 0,
2230      "cost_per_1m_out_cached": 0,
2231      "context_window": 131072,
2232      "default_max_tokens": 65536,
2233      "can_reason": true,
2234      "reasoning_levels": [
2235        "low",
2236        "medium",
2237        "high"
2238      ],
2239      "default_reasoning_effort": "medium",
2240      "supports_attachments": false
2241    },
2242    {
2243      "id": "openai/gpt-oss-120b:free",
2244      "name": "OpenAI: gpt-oss-120b (free)",
2245      "cost_per_1m_in": 0,
2246      "cost_per_1m_out": 0,
2247      "cost_per_1m_in_cached": 0,
2248      "cost_per_1m_out_cached": 0,
2249      "context_window": 131072,
2250      "default_max_tokens": 65536,
2251      "can_reason": true,
2252      "reasoning_levels": [
2253        "low",
2254        "medium",
2255        "high"
2256      ],
2257      "default_reasoning_effort": "medium",
2258      "supports_attachments": false
2259    },
2260    {
2261      "id": "openai/gpt-oss-20b",
2262      "name": "OpenAI: gpt-oss-20b",
2263      "cost_per_1m_in": 0.05,
2264      "cost_per_1m_out": 0.2,
2265      "cost_per_1m_in_cached": 0,
2266      "cost_per_1m_out_cached": 0.05,
2267      "context_window": 131072,
2268      "default_max_tokens": 65536,
2269      "can_reason": true,
2270      "reasoning_levels": [
2271        "low",
2272        "medium",
2273        "high"
2274      ],
2275      "default_reasoning_effort": "medium",
2276      "supports_attachments": false
2277    },
2278    {
2279      "id": "openai/gpt-oss-20b:free",
2280      "name": "OpenAI: gpt-oss-20b (free)",
2281      "cost_per_1m_in": 0,
2282      "cost_per_1m_out": 0,
2283      "cost_per_1m_in_cached": 0,
2284      "cost_per_1m_out_cached": 0,
2285      "context_window": 131072,
2286      "default_max_tokens": 4096,
2287      "can_reason": true,
2288      "reasoning_levels": [
2289        "low",
2290        "medium",
2291        "high"
2292      ],
2293      "default_reasoning_effort": "medium",
2294      "supports_attachments": false
2295    },
2296    {
2297      "id": "openai/gpt-oss-safeguard-20b",
2298      "name": "OpenAI: gpt-oss-safeguard-20b",
2299      "cost_per_1m_in": 0.075,
2300      "cost_per_1m_out": 0.3,
2301      "cost_per_1m_in_cached": 0,
2302      "cost_per_1m_out_cached": 0.037,
2303      "context_window": 131072,
2304      "default_max_tokens": 32768,
2305      "can_reason": true,
2306      "reasoning_levels": [
2307        "low",
2308        "medium",
2309        "high"
2310      ],
2311      "default_reasoning_effort": "medium",
2312      "supports_attachments": false
2313    },
2314    {
2315      "id": "openai/o1",
2316      "name": "OpenAI: o1",
2317      "cost_per_1m_in": 15,
2318      "cost_per_1m_out": 60,
2319      "cost_per_1m_in_cached": 0,
2320      "cost_per_1m_out_cached": 7.5,
2321      "context_window": 200000,
2322      "default_max_tokens": 50000,
2323      "can_reason": true,
2324      "reasoning_levels": [
2325        "low",
2326        "medium",
2327        "high"
2328      ],
2329      "default_reasoning_effort": "medium",
2330      "supports_attachments": true
2331    },
2332    {
2333      "id": "openai/o3",
2334      "name": "OpenAI: o3",
2335      "cost_per_1m_in": 2,
2336      "cost_per_1m_out": 8,
2337      "cost_per_1m_in_cached": 0,
2338      "cost_per_1m_out_cached": 0.5,
2339      "context_window": 200000,
2340      "default_max_tokens": 50000,
2341      "can_reason": true,
2342      "reasoning_levels": [
2343        "low",
2344        "medium",
2345        "high"
2346      ],
2347      "default_reasoning_effort": "medium",
2348      "supports_attachments": true
2349    },
2350    {
2351      "id": "openai/o3-deep-research",
2352      "name": "OpenAI: o3 Deep Research",
2353      "cost_per_1m_in": 10,
2354      "cost_per_1m_out": 40,
2355      "cost_per_1m_in_cached": 0,
2356      "cost_per_1m_out_cached": 2.5,
2357      "context_window": 200000,
2358      "default_max_tokens": 50000,
2359      "can_reason": true,
2360      "reasoning_levels": [
2361        "low",
2362        "medium",
2363        "high"
2364      ],
2365      "default_reasoning_effort": "medium",
2366      "supports_attachments": true
2367    },
2368    {
2369      "id": "openai/o3-mini",
2370      "name": "OpenAI: o3 Mini",
2371      "cost_per_1m_in": 1.1,
2372      "cost_per_1m_out": 4.4,
2373      "cost_per_1m_in_cached": 0,
2374      "cost_per_1m_out_cached": 0.55,
2375      "context_window": 200000,
2376      "default_max_tokens": 50000,
2377      "can_reason": true,
2378      "reasoning_levels": [
2379        "low",
2380        "medium",
2381        "high"
2382      ],
2383      "default_reasoning_effort": "medium",
2384      "supports_attachments": false
2385    },
2386    {
2387      "id": "openai/o3-mini-high",
2388      "name": "OpenAI: o3 Mini High",
2389      "cost_per_1m_in": 1.1,
2390      "cost_per_1m_out": 4.4,
2391      "cost_per_1m_in_cached": 0,
2392      "cost_per_1m_out_cached": 0.55,
2393      "context_window": 200000,
2394      "default_max_tokens": 50000,
2395      "can_reason": true,
2396      "reasoning_levels": [
2397        "low",
2398        "medium",
2399        "high"
2400      ],
2401      "default_reasoning_effort": "medium",
2402      "supports_attachments": false
2403    },
2404    {
2405      "id": "openai/o3-pro",
2406      "name": "OpenAI: o3 Pro",
2407      "cost_per_1m_in": 20,
2408      "cost_per_1m_out": 80,
2409      "cost_per_1m_in_cached": 0,
2410      "cost_per_1m_out_cached": 0,
2411      "context_window": 200000,
2412      "default_max_tokens": 50000,
2413      "can_reason": true,
2414      "reasoning_levels": [
2415        "low",
2416        "medium",
2417        "high"
2418      ],
2419      "default_reasoning_effort": "medium",
2420      "supports_attachments": true
2421    },
2422    {
2423      "id": "openai/o4-mini",
2424      "name": "OpenAI: o4 Mini",
2425      "cost_per_1m_in": 1.1,
2426      "cost_per_1m_out": 4.4,
2427      "cost_per_1m_in_cached": 0,
2428      "cost_per_1m_out_cached": 0.275,
2429      "context_window": 200000,
2430      "default_max_tokens": 50000,
2431      "can_reason": true,
2432      "reasoning_levels": [
2433        "low",
2434        "medium",
2435        "high"
2436      ],
2437      "default_reasoning_effort": "medium",
2438      "supports_attachments": true
2439    },
2440    {
2441      "id": "openai/o4-mini-deep-research",
2442      "name": "OpenAI: o4 Mini Deep Research",
2443      "cost_per_1m_in": 2,
2444      "cost_per_1m_out": 8,
2445      "cost_per_1m_in_cached": 0,
2446      "cost_per_1m_out_cached": 0.5,
2447      "context_window": 200000,
2448      "default_max_tokens": 50000,
2449      "can_reason": true,
2450      "reasoning_levels": [
2451        "low",
2452        "medium",
2453        "high"
2454      ],
2455      "default_reasoning_effort": "medium",
2456      "supports_attachments": true
2457    },
2458    {
2459      "id": "openai/o4-mini-high",
2460      "name": "OpenAI: o4 Mini High",
2461      "cost_per_1m_in": 1.1,
2462      "cost_per_1m_out": 4.4,
2463      "cost_per_1m_in_cached": 0,
2464      "cost_per_1m_out_cached": 0.275,
2465      "context_window": 200000,
2466      "default_max_tokens": 50000,
2467      "can_reason": true,
2468      "reasoning_levels": [
2469        "low",
2470        "medium",
2471        "high"
2472      ],
2473      "default_reasoning_effort": "medium",
2474      "supports_attachments": true
2475    },
2476    {
2477      "id": "prime-intellect/intellect-3",
2478      "name": "Prime Intellect: INTELLECT-3",
2479      "cost_per_1m_in": 0.2,
2480      "cost_per_1m_out": 1.1,
2481      "cost_per_1m_in_cached": 0,
2482      "cost_per_1m_out_cached": 0,
2483      "context_window": 131072,
2484      "default_max_tokens": 65536,
2485      "can_reason": true,
2486      "reasoning_levels": [
2487        "low",
2488        "medium",
2489        "high"
2490      ],
2491      "default_reasoning_effort": "medium",
2492      "supports_attachments": false
2493    },
2494    {
2495      "id": "qwen/qwen-2.5-72b-instruct",
2496      "name": "Qwen2.5 72B Instruct",
2497      "cost_per_1m_in": 0.12,
2498      "cost_per_1m_out": 0.39,
2499      "cost_per_1m_in_cached": 0,
2500      "cost_per_1m_out_cached": 0,
2501      "context_window": 32768,
2502      "default_max_tokens": 8192,
2503      "can_reason": false,
2504      "supports_attachments": false
2505    },
2506    {
2507      "id": "qwen/qwq-32b",
2508      "name": "Qwen: QwQ 32B",
2509      "cost_per_1m_in": 0.15,
2510      "cost_per_1m_out": 0.58,
2511      "cost_per_1m_in_cached": 0,
2512      "cost_per_1m_out_cached": 0,
2513      "context_window": 131072,
2514      "default_max_tokens": 65536,
2515      "can_reason": true,
2516      "reasoning_levels": [
2517        "low",
2518        "medium",
2519        "high"
2520      ],
2521      "default_reasoning_effort": "medium",
2522      "supports_attachments": false
2523    },
2524    {
2525      "id": "qwen/qwen-plus-2025-07-28",
2526      "name": "Qwen: Qwen Plus 0728",
2527      "cost_per_1m_in": 0.26,
2528      "cost_per_1m_out": 0.78,
2529      "cost_per_1m_in_cached": 0.325,
2530      "cost_per_1m_out_cached": 0,
2531      "context_window": 1000000,
2532      "default_max_tokens": 16384,
2533      "can_reason": false,
2534      "supports_attachments": false
2535    },
2536    {
2537      "id": "qwen/qwen-plus-2025-07-28:thinking",
2538      "name": "Qwen: Qwen Plus 0728 (thinking)",
2539      "cost_per_1m_in": 0.26,
2540      "cost_per_1m_out": 0.78,
2541      "cost_per_1m_in_cached": 0.325,
2542      "cost_per_1m_out_cached": 0,
2543      "context_window": 1000000,
2544      "default_max_tokens": 16384,
2545      "can_reason": true,
2546      "reasoning_levels": [
2547        "low",
2548        "medium",
2549        "high"
2550      ],
2551      "default_reasoning_effort": "medium",
2552      "supports_attachments": false
2553    },
2554    {
2555      "id": "qwen/qwen-vl-max",
2556      "name": "Qwen: Qwen VL Max",
2557      "cost_per_1m_in": 0.52,
2558      "cost_per_1m_out": 2.08,
2559      "cost_per_1m_in_cached": 0,
2560      "cost_per_1m_out_cached": 0,
2561      "context_window": 131072,
2562      "default_max_tokens": 16384,
2563      "can_reason": false,
2564      "supports_attachments": true
2565    },
2566    {
2567      "id": "qwen/qwen-max",
2568      "name": "Qwen: Qwen-Max ",
2569      "cost_per_1m_in": 1.04,
2570      "cost_per_1m_out": 4.16,
2571      "cost_per_1m_in_cached": 0,
2572      "cost_per_1m_out_cached": 0.208,
2573      "context_window": 32768,
2574      "default_max_tokens": 4096,
2575      "can_reason": false,
2576      "supports_attachments": false
2577    },
2578    {
2579      "id": "qwen/qwen-plus",
2580      "name": "Qwen: Qwen-Plus",
2581      "cost_per_1m_in": 0.26,
2582      "cost_per_1m_out": 0.78,
2583      "cost_per_1m_in_cached": 0.325,
2584      "cost_per_1m_out_cached": 0.052,
2585      "context_window": 1000000,
2586      "default_max_tokens": 16384,
2587      "can_reason": false,
2588      "supports_attachments": false
2589    },
2590    {
2591      "id": "qwen/qwen-turbo",
2592      "name": "Qwen: Qwen-Turbo",
2593      "cost_per_1m_in": 0.0325,
2594      "cost_per_1m_out": 0.13,
2595      "cost_per_1m_in_cached": 0,
2596      "cost_per_1m_out_cached": 0.0065,
2597      "context_window": 131072,
2598      "default_max_tokens": 4096,
2599      "can_reason": false,
2600      "supports_attachments": false
2601    },
2602    {
2603      "id": "qwen/qwen-2.5-7b-instruct",
2604      "name": "Qwen: Qwen2.5 7B Instruct",
2605      "cost_per_1m_in": 0.04,
2606      "cost_per_1m_out": 0.1,
2607      "cost_per_1m_in_cached": 0,
2608      "cost_per_1m_out_cached": 0.04,
2609      "context_window": 32768,
2610      "default_max_tokens": 4096,
2611      "can_reason": false,
2612      "supports_attachments": false
2613    },
2614    {
2615      "id": "qwen/qwen3-14b",
2616      "name": "Qwen: Qwen3 14B",
2617      "cost_per_1m_in": 0.2275,
2618      "cost_per_1m_out": 0.91,
2619      "cost_per_1m_in_cached": 0,
2620      "cost_per_1m_out_cached": 0,
2621      "context_window": 131072,
2622      "default_max_tokens": 4096,
2623      "can_reason": true,
2624      "reasoning_levels": [
2625        "low",
2626        "medium",
2627        "high"
2628      ],
2629      "default_reasoning_effort": "medium",
2630      "supports_attachments": false
2631    },
2632    {
2633      "id": "qwen/qwen3-235b-a22b",
2634      "name": "Qwen: Qwen3 235B A22B",
2635      "cost_per_1m_in": 0.455,
2636      "cost_per_1m_out": 1.82,
2637      "cost_per_1m_in_cached": 0,
2638      "cost_per_1m_out_cached": 0,
2639      "context_window": 131072,
2640      "default_max_tokens": 4096,
2641      "can_reason": true,
2642      "reasoning_levels": [
2643        "low",
2644        "medium",
2645        "high"
2646      ],
2647      "default_reasoning_effort": "medium",
2648      "supports_attachments": false
2649    },
2650    {
2651      "id": "qwen/qwen3-235b-a22b-2507",
2652      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2653      "cost_per_1m_in": 0.22,
2654      "cost_per_1m_out": 0.88,
2655      "cost_per_1m_in_cached": 0,
2656      "cost_per_1m_out_cached": 0,
2657      "context_window": 262144,
2658      "default_max_tokens": 8192,
2659      "can_reason": false,
2660      "supports_attachments": false
2661    },
2662    {
2663      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2664      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2665      "cost_per_1m_in": 0.1495,
2666      "cost_per_1m_out": 1.495,
2667      "cost_per_1m_in_cached": 0,
2668      "cost_per_1m_out_cached": 0,
2669      "context_window": 131072,
2670      "default_max_tokens": 13107,
2671      "can_reason": true,
2672      "reasoning_levels": [
2673        "low",
2674        "medium",
2675        "high"
2676      ],
2677      "default_reasoning_effort": "medium",
2678      "supports_attachments": false
2679    },
2680    {
2681      "id": "qwen/qwen3-30b-a3b",
2682      "name": "Qwen: Qwen3 30B A3B",
2683      "cost_per_1m_in": 0.13,
2684      "cost_per_1m_out": 0.52,
2685      "cost_per_1m_in_cached": 0,
2686      "cost_per_1m_out_cached": 0,
2687      "context_window": 131072,
2688      "default_max_tokens": 4096,
2689      "can_reason": true,
2690      "reasoning_levels": [
2691        "low",
2692        "medium",
2693        "high"
2694      ],
2695      "default_reasoning_effort": "medium",
2696      "supports_attachments": false
2697    },
2698    {
2699      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2700      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2701      "cost_per_1m_in": 0.1,
2702      "cost_per_1m_out": 0.3,
2703      "cost_per_1m_in_cached": 0,
2704      "cost_per_1m_out_cached": 0,
2705      "context_window": 262144,
2706      "default_max_tokens": 26214,
2707      "can_reason": false,
2708      "supports_attachments": false
2709    },
2710    {
2711      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2712      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2713      "cost_per_1m_in": 0.08,
2714      "cost_per_1m_out": 0.4,
2715      "cost_per_1m_in_cached": 0,
2716      "cost_per_1m_out_cached": 0.08,
2717      "context_window": 131072,
2718      "default_max_tokens": 65536,
2719      "can_reason": true,
2720      "reasoning_levels": [
2721        "low",
2722        "medium",
2723        "high"
2724      ],
2725      "default_reasoning_effort": "medium",
2726      "supports_attachments": false
2727    },
2728    {
2729      "id": "qwen/qwen3-32b",
2730      "name": "Qwen: Qwen3 32B",
2731      "cost_per_1m_in": 0.104,
2732      "cost_per_1m_out": 0.416,
2733      "cost_per_1m_in_cached": 0,
2734      "cost_per_1m_out_cached": 0,
2735      "context_window": 131072,
2736      "default_max_tokens": 4096,
2737      "can_reason": true,
2738      "reasoning_levels": [
2739        "low",
2740        "medium",
2741        "high"
2742      ],
2743      "default_reasoning_effort": "medium",
2744      "supports_attachments": false
2745    },
2746    {
2747      "id": "qwen/qwen3-8b",
2748      "name": "Qwen: Qwen3 8B",
2749      "cost_per_1m_in": 0.117,
2750      "cost_per_1m_out": 0.455,
2751      "cost_per_1m_in_cached": 0,
2752      "cost_per_1m_out_cached": 0,
2753      "context_window": 131072,
2754      "default_max_tokens": 4096,
2755      "can_reason": true,
2756      "reasoning_levels": [
2757        "low",
2758        "medium",
2759        "high"
2760      ],
2761      "default_reasoning_effort": "medium",
2762      "supports_attachments": false
2763    },
2764    {
2765      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2766      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2767      "cost_per_1m_in": 0.2925,
2768      "cost_per_1m_out": 1.4625,
2769      "cost_per_1m_in_cached": 0,
2770      "cost_per_1m_out_cached": 0,
2771      "context_window": 262144,
2772      "default_max_tokens": 32768,
2773      "can_reason": false,
2774      "supports_attachments": false
2775    },
2776    {
2777      "id": "qwen/qwen3-coder",
2778      "name": "Qwen: Qwen3 Coder 480B A35B",
2779      "cost_per_1m_in": 0.22,
2780      "cost_per_1m_out": 1.8,
2781      "cost_per_1m_in_cached": 0,
2782      "cost_per_1m_out_cached": 0,
2783      "context_window": 262144,
2784      "default_max_tokens": 32768,
2785      "can_reason": false,
2786      "supports_attachments": false
2787    },
2788    {
2789      "id": "qwen/qwen3-coder:free",
2790      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2791      "cost_per_1m_in": 0,
2792      "cost_per_1m_out": 0,
2793      "cost_per_1m_in_cached": 0,
2794      "cost_per_1m_out_cached": 0,
2795      "context_window": 262000,
2796      "default_max_tokens": 131000,
2797      "can_reason": false,
2798      "supports_attachments": false
2799    },
2800    {
2801      "id": "qwen/qwen3-coder-flash",
2802      "name": "Qwen: Qwen3 Coder Flash",
2803      "cost_per_1m_in": 0.195,
2804      "cost_per_1m_out": 0.975,
2805      "cost_per_1m_in_cached": 0.24375,
2806      "cost_per_1m_out_cached": 0.039,
2807      "context_window": 1000000,
2808      "default_max_tokens": 32768,
2809      "can_reason": false,
2810      "supports_attachments": false
2811    },
2812    {
2813      "id": "qwen/qwen3-coder-next",
2814      "name": "Qwen: Qwen3 Coder Next",
2815      "cost_per_1m_in": 0.15,
2816      "cost_per_1m_out": 0.8,
2817      "cost_per_1m_in_cached": 0,
2818      "cost_per_1m_out_cached": 0.1,
2819      "context_window": 262144,
2820      "default_max_tokens": 131072,
2821      "can_reason": false,
2822      "supports_attachments": false
2823    },
2824    {
2825      "id": "qwen/qwen3-coder-plus",
2826      "name": "Qwen: Qwen3 Coder Plus",
2827      "cost_per_1m_in": 0.65,
2828      "cost_per_1m_out": 3.25,
2829      "cost_per_1m_in_cached": 0.8125,
2830      "cost_per_1m_out_cached": 0.13,
2831      "context_window": 1000000,
2832      "default_max_tokens": 32768,
2833      "can_reason": false,
2834      "supports_attachments": false
2835    },
2836    {
2837      "id": "qwen/qwen3-max",
2838      "name": "Qwen: Qwen3 Max",
2839      "cost_per_1m_in": 0.78,
2840      "cost_per_1m_out": 3.9,
2841      "cost_per_1m_in_cached": 0.975,
2842      "cost_per_1m_out_cached": 0.156,
2843      "context_window": 262144,
2844      "default_max_tokens": 16384,
2845      "can_reason": false,
2846      "supports_attachments": false
2847    },
2848    {
2849      "id": "qwen/qwen3-max-thinking",
2850      "name": "Qwen: Qwen3 Max Thinking",
2851      "cost_per_1m_in": 0.78,
2852      "cost_per_1m_out": 3.9,
2853      "cost_per_1m_in_cached": 0,
2854      "cost_per_1m_out_cached": 0,
2855      "context_window": 262144,
2856      "default_max_tokens": 16384,
2857      "can_reason": true,
2858      "reasoning_levels": [
2859        "low",
2860        "medium",
2861        "high"
2862      ],
2863      "default_reasoning_effort": "medium",
2864      "supports_attachments": false
2865    },
2866    {
2867      "id": "qwen/qwen3-next-80b-a3b-instruct",
2868      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2869      "cost_per_1m_in": 0.1,
2870      "cost_per_1m_out": 1.1,
2871      "cost_per_1m_in_cached": 0,
2872      "cost_per_1m_out_cached": 0.07,
2873      "context_window": 262144,
2874      "default_max_tokens": 131072,
2875      "can_reason": false,
2876      "supports_attachments": false
2877    },
2878    {
2879      "id": "qwen/qwen3-next-80b-a3b-instruct:free",
2880      "name": "Qwen: Qwen3 Next 80B A3B Instruct (free)",
2881      "cost_per_1m_in": 0,
2882      "cost_per_1m_out": 0,
2883      "cost_per_1m_in_cached": 0,
2884      "cost_per_1m_out_cached": 0,
2885      "context_window": 262144,
2886      "default_max_tokens": 26214,
2887      "can_reason": false,
2888      "supports_attachments": false
2889    },
2890    {
2891      "id": "qwen/qwen3-next-80b-a3b-thinking",
2892      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2893      "cost_per_1m_in": 0.0975,
2894      "cost_per_1m_out": 0.78,
2895      "cost_per_1m_in_cached": 0,
2896      "cost_per_1m_out_cached": 0,
2897      "context_window": 131072,
2898      "default_max_tokens": 16384,
2899      "can_reason": true,
2900      "reasoning_levels": [
2901        "low",
2902        "medium",
2903        "high"
2904      ],
2905      "default_reasoning_effort": "medium",
2906      "supports_attachments": false
2907    },
2908    {
2909      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2910      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2911      "cost_per_1m_in": 0.25,
2912      "cost_per_1m_out": 1.5,
2913      "cost_per_1m_in_cached": 0,
2914      "cost_per_1m_out_cached": 0,
2915      "context_window": 256000,
2916      "default_max_tokens": 8192,
2917      "can_reason": false,
2918      "supports_attachments": true
2919    },
2920    {
2921      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2922      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2923      "cost_per_1m_in": 0.26,
2924      "cost_per_1m_out": 2.6,
2925      "cost_per_1m_in_cached": 0,
2926      "cost_per_1m_out_cached": 0,
2927      "context_window": 131072,
2928      "default_max_tokens": 16384,
2929      "can_reason": true,
2930      "reasoning_levels": [
2931        "low",
2932        "medium",
2933        "high"
2934      ],
2935      "default_reasoning_effort": "medium",
2936      "supports_attachments": true
2937    },
2938    {
2939      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2940      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2941      "cost_per_1m_in": 0.29,
2942      "cost_per_1m_out": 1,
2943      "cost_per_1m_in_cached": 0,
2944      "cost_per_1m_out_cached": 0,
2945      "context_window": 262144,
2946      "default_max_tokens": 131072,
2947      "can_reason": false,
2948      "supports_attachments": true
2949    },
2950    {
2951      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2952      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2953      "cost_per_1m_in": 0.13,
2954      "cost_per_1m_out": 1.56,
2955      "cost_per_1m_in_cached": 0,
2956      "cost_per_1m_out_cached": 0,
2957      "context_window": 131072,
2958      "default_max_tokens": 16384,
2959      "can_reason": true,
2960      "reasoning_levels": [
2961        "low",
2962        "medium",
2963        "high"
2964      ],
2965      "default_reasoning_effort": "medium",
2966      "supports_attachments": true
2967    },
2968    {
2969      "id": "qwen/qwen3-vl-32b-instruct",
2970      "name": "Qwen: Qwen3 VL 32B Instruct",
2971      "cost_per_1m_in": 0.104,
2972      "cost_per_1m_out": 0.416,
2973      "cost_per_1m_in_cached": 0,
2974      "cost_per_1m_out_cached": 0,
2975      "context_window": 131072,
2976      "default_max_tokens": 16384,
2977      "can_reason": false,
2978      "supports_attachments": true
2979    },
2980    {
2981      "id": "qwen/qwen3-vl-8b-instruct",
2982      "name": "Qwen: Qwen3 VL 8B Instruct",
2983      "cost_per_1m_in": 0.25,
2984      "cost_per_1m_out": 0.75,
2985      "cost_per_1m_in_cached": 0,
2986      "cost_per_1m_out_cached": 0.12,
2987      "context_window": 262144,
2988      "default_max_tokens": 131072,
2989      "can_reason": false,
2990      "supports_attachments": true
2991    },
2992    {
2993      "id": "qwen/qwen3-vl-8b-thinking",
2994      "name": "Qwen: Qwen3 VL 8B Thinking",
2995      "cost_per_1m_in": 0.117,
2996      "cost_per_1m_out": 1.365,
2997      "cost_per_1m_in_cached": 0,
2998      "cost_per_1m_out_cached": 0,
2999      "context_window": 131072,
3000      "default_max_tokens": 16384,
3001      "can_reason": true,
3002      "reasoning_levels": [
3003        "low",
3004        "medium",
3005        "high"
3006      ],
3007      "default_reasoning_effort": "medium",
3008      "supports_attachments": true
3009    },
3010    {
3011      "id": "qwen/qwen3.5-397b-a17b",
3012      "name": "Qwen: Qwen3.5 397B A17B",
3013      "cost_per_1m_in": 0.39,
3014      "cost_per_1m_out": 2.34,
3015      "cost_per_1m_in_cached": 0,
3016      "cost_per_1m_out_cached": 0.195,
3017      "context_window": 262144,
3018      "default_max_tokens": 32768,
3019      "can_reason": true,
3020      "reasoning_levels": [
3021        "low",
3022        "medium",
3023        "high"
3024      ],
3025      "default_reasoning_effort": "medium",
3026      "supports_attachments": true
3027    },
3028    {
3029      "id": "qwen/qwen3.5-plus-02-15",
3030      "name": "Qwen: Qwen3.5 Plus 2026-02-15",
3031      "cost_per_1m_in": 0.26,
3032      "cost_per_1m_out": 1.56,
3033      "cost_per_1m_in_cached": 0.325,
3034      "cost_per_1m_out_cached": 0,
3035      "context_window": 1000000,
3036      "default_max_tokens": 32768,
3037      "can_reason": true,
3038      "reasoning_levels": [
3039        "low",
3040        "medium",
3041        "high"
3042      ],
3043      "default_reasoning_effort": "medium",
3044      "supports_attachments": true
3045    },
3046    {
3047      "id": "qwen/qwen3.5-122b-a10b",
3048      "name": "Qwen: Qwen3.5-122B-A10B",
3049      "cost_per_1m_in": 0.3,
3050      "cost_per_1m_out": 2.4,
3051      "cost_per_1m_in_cached": 0,
3052      "cost_per_1m_out_cached": 0.3,
3053      "context_window": 262144,
3054      "default_max_tokens": 32768,
3055      "can_reason": true,
3056      "reasoning_levels": [
3057        "low",
3058        "medium",
3059        "high"
3060      ],
3061      "default_reasoning_effort": "medium",
3062      "supports_attachments": true
3063    },
3064    {
3065      "id": "qwen/qwen3.5-27b",
3066      "name": "Qwen: Qwen3.5-27B",
3067      "cost_per_1m_in": 0.3,
3068      "cost_per_1m_out": 2.4,
3069      "cost_per_1m_in_cached": 0,
3070      "cost_per_1m_out_cached": 0,
3071      "context_window": 262144,
3072      "default_max_tokens": 32768,
3073      "can_reason": true,
3074      "reasoning_levels": [
3075        "low",
3076        "medium",
3077        "high"
3078      ],
3079      "default_reasoning_effort": "medium",
3080      "supports_attachments": true
3081    },
3082    {
3083      "id": "qwen/qwen3.5-35b-a3b",
3084      "name": "Qwen: Qwen3.5-35B-A3B",
3085      "cost_per_1m_in": 0.225,
3086      "cost_per_1m_out": 1.8,
3087      "cost_per_1m_in_cached": 0,
3088      "cost_per_1m_out_cached": 0.225,
3089      "context_window": 262144,
3090      "default_max_tokens": 32768,
3091      "can_reason": true,
3092      "reasoning_levels": [
3093        "low",
3094        "medium",
3095        "high"
3096      ],
3097      "default_reasoning_effort": "medium",
3098      "supports_attachments": true
3099    },
3100    {
3101      "id": "qwen/qwen3.5-9b",
3102      "name": "Qwen: Qwen3.5-9B",
3103      "cost_per_1m_in": 0.1,
3104      "cost_per_1m_out": 0.15,
3105      "cost_per_1m_in_cached": 0,
3106      "cost_per_1m_out_cached": 0,
3107      "context_window": 262144,
3108      "default_max_tokens": 26214,
3109      "can_reason": true,
3110      "reasoning_levels": [
3111        "low",
3112        "medium",
3113        "high"
3114      ],
3115      "default_reasoning_effort": "medium",
3116      "supports_attachments": true
3117    },
3118    {
3119      "id": "qwen/qwen3.5-flash-02-23",
3120      "name": "Qwen: Qwen3.5-Flash",
3121      "cost_per_1m_in": 0.065,
3122      "cost_per_1m_out": 0.26,
3123      "cost_per_1m_in_cached": 0.08125,
3124      "cost_per_1m_out_cached": 0,
3125      "context_window": 1000000,
3126      "default_max_tokens": 32768,
3127      "can_reason": true,
3128      "reasoning_levels": [
3129        "low",
3130        "medium",
3131        "high"
3132      ],
3133      "default_reasoning_effort": "medium",
3134      "supports_attachments": true
3135    },
3136    {
3137      "id": "qwen/qwen3.6-plus",
3138      "name": "Qwen: Qwen3.6 Plus",
3139      "cost_per_1m_in": 0.325,
3140      "cost_per_1m_out": 1.95,
3141      "cost_per_1m_in_cached": 0.40625,
3142      "cost_per_1m_out_cached": 0,
3143      "context_window": 1000000,
3144      "default_max_tokens": 32768,
3145      "can_reason": true,
3146      "reasoning_levels": [
3147        "low",
3148        "medium",
3149        "high"
3150      ],
3151      "default_reasoning_effort": "medium",
3152      "supports_attachments": true
3153    },
3154    {
3155      "id": "relace/relace-search",
3156      "name": "Relace: Relace Search",
3157      "cost_per_1m_in": 1,
3158      "cost_per_1m_out": 3,
3159      "cost_per_1m_in_cached": 0,
3160      "cost_per_1m_out_cached": 0,
3161      "context_window": 256000,
3162      "default_max_tokens": 64000,
3163      "can_reason": false,
3164      "supports_attachments": false
3165    },
3166    {
3167      "id": "stepfun/step-3.5-flash",
3168      "name": "StepFun: Step 3.5 Flash",
3169      "cost_per_1m_in": 0.1,
3170      "cost_per_1m_out": 0.3,
3171      "cost_per_1m_in_cached": 0,
3172      "cost_per_1m_out_cached": 0,
3173      "context_window": 262144,
3174      "default_max_tokens": 32768,
3175      "can_reason": true,
3176      "reasoning_levels": [
3177        "low",
3178        "medium",
3179        "high"
3180      ],
3181      "default_reasoning_effort": "medium",
3182      "supports_attachments": false
3183    },
3184    {
3185      "id": "tngtech/deepseek-r1t2-chimera",
3186      "name": "TNG: DeepSeek R1T2 Chimera",
3187      "cost_per_1m_in": 0.3,
3188      "cost_per_1m_out": 1.1,
3189      "cost_per_1m_in_cached": 0,
3190      "cost_per_1m_out_cached": 0.15,
3191      "context_window": 163840,
3192      "default_max_tokens": 81920,
3193      "can_reason": true,
3194      "reasoning_levels": [
3195        "low",
3196        "medium",
3197        "high"
3198      ],
3199      "default_reasoning_effort": "medium",
3200      "supports_attachments": false
3201    },
3202    {
3203      "id": "tencent/hy3-preview:free",
3204      "name": "Tencent: Hy3 preview (free)",
3205      "cost_per_1m_in": 0,
3206      "cost_per_1m_out": 0,
3207      "cost_per_1m_in_cached": 0,
3208      "cost_per_1m_out_cached": 0,
3209      "context_window": 262144,
3210      "default_max_tokens": 131072,
3211      "can_reason": true,
3212      "reasoning_levels": [
3213        "low",
3214        "medium",
3215        "high"
3216      ],
3217      "default_reasoning_effort": "medium",
3218      "supports_attachments": false
3219    },
3220    {
3221      "id": "thedrummer/rocinante-12b",
3222      "name": "TheDrummer: Rocinante 12B",
3223      "cost_per_1m_in": 0.17,
3224      "cost_per_1m_out": 0.43,
3225      "cost_per_1m_in_cached": 0,
3226      "cost_per_1m_out_cached": 0,
3227      "context_window": 32768,
3228      "default_max_tokens": 16384,
3229      "can_reason": false,
3230      "supports_attachments": false
3231    },
3232    {
3233      "id": "thedrummer/unslopnemo-12b",
3234      "name": "TheDrummer: UnslopNemo 12B",
3235      "cost_per_1m_in": 0.4,
3236      "cost_per_1m_out": 0.4,
3237      "cost_per_1m_in_cached": 0,
3238      "cost_per_1m_out_cached": 0,
3239      "context_window": 32768,
3240      "default_max_tokens": 16384,
3241      "can_reason": false,
3242      "supports_attachments": false
3243    },
3244    {
3245      "id": "alibaba/tongyi-deepresearch-30b-a3b",
3246      "name": "Tongyi DeepResearch 30B A3B",
3247      "cost_per_1m_in": 0.09,
3248      "cost_per_1m_out": 0.45,
3249      "cost_per_1m_in_cached": 0,
3250      "cost_per_1m_out_cached": 0.09,
3251      "context_window": 131072,
3252      "default_max_tokens": 65536,
3253      "can_reason": true,
3254      "reasoning_levels": [
3255        "low",
3256        "medium",
3257        "high"
3258      ],
3259      "default_reasoning_effort": "medium",
3260      "supports_attachments": false
3261    },
3262    {
3263      "id": "upstage/solar-pro-3",
3264      "name": "Upstage: Solar Pro 3",
3265      "cost_per_1m_in": 0.15,
3266      "cost_per_1m_out": 0.6,
3267      "cost_per_1m_in_cached": 0,
3268      "cost_per_1m_out_cached": 0.015,
3269      "context_window": 128000,
3270      "default_max_tokens": 12800,
3271      "can_reason": true,
3272      "reasoning_levels": [
3273        "low",
3274        "medium",
3275        "high"
3276      ],
3277      "default_reasoning_effort": "medium",
3278      "supports_attachments": false
3279    },
3280    {
3281      "id": "xiaomi/mimo-v2-flash",
3282      "name": "Xiaomi: MiMo-V2-Flash",
3283      "cost_per_1m_in": 0.1,
3284      "cost_per_1m_out": 0.3,
3285      "cost_per_1m_in_cached": 0,
3286      "cost_per_1m_out_cached": 0.02,
3287      "context_window": 262144,
3288      "default_max_tokens": 16000,
3289      "can_reason": true,
3290      "reasoning_levels": [
3291        "low",
3292        "medium",
3293        "high"
3294      ],
3295      "default_reasoning_effort": "medium",
3296      "supports_attachments": false
3297    },
3298    {
3299      "id": "xiaomi/mimo-v2-omni",
3300      "name": "Xiaomi: MiMo-V2-Omni",
3301      "cost_per_1m_in": 0.4,
3302      "cost_per_1m_out": 2,
3303      "cost_per_1m_in_cached": 0,
3304      "cost_per_1m_out_cached": 0.08,
3305      "context_window": 262144,
3306      "default_max_tokens": 32768,
3307      "can_reason": true,
3308      "reasoning_levels": [
3309        "low",
3310        "medium",
3311        "high"
3312      ],
3313      "default_reasoning_effort": "medium",
3314      "supports_attachments": true
3315    },
3316    {
3317      "id": "xiaomi/mimo-v2-pro",
3318      "name": "Xiaomi: MiMo-V2-Pro",
3319      "cost_per_1m_in": 1,
3320      "cost_per_1m_out": 3,
3321      "cost_per_1m_in_cached": 0,
3322      "cost_per_1m_out_cached": 0.2,
3323      "context_window": 1048576,
3324      "default_max_tokens": 65536,
3325      "can_reason": true,
3326      "reasoning_levels": [
3327        "low",
3328        "medium",
3329        "high"
3330      ],
3331      "default_reasoning_effort": "medium",
3332      "supports_attachments": false
3333    },
3334    {
3335      "id": "xiaomi/mimo-v2.5",
3336      "name": "Xiaomi: MiMo-V2.5",
3337      "cost_per_1m_in": 0.4,
3338      "cost_per_1m_out": 2,
3339      "cost_per_1m_in_cached": 0,
3340      "cost_per_1m_out_cached": 0.08,
3341      "context_window": 1048576,
3342      "default_max_tokens": 65536,
3343      "can_reason": true,
3344      "reasoning_levels": [
3345        "low",
3346        "medium",
3347        "high"
3348      ],
3349      "default_reasoning_effort": "medium",
3350      "supports_attachments": true
3351    },
3352    {
3353      "id": "xiaomi/mimo-v2.5-pro",
3354      "name": "Xiaomi: MiMo-V2.5-Pro",
3355      "cost_per_1m_in": 1,
3356      "cost_per_1m_out": 3,
3357      "cost_per_1m_in_cached": 0,
3358      "cost_per_1m_out_cached": 0.2,
3359      "context_window": 1048576,
3360      "default_max_tokens": 65536,
3361      "can_reason": true,
3362      "reasoning_levels": [
3363        "low",
3364        "medium",
3365        "high"
3366      ],
3367      "default_reasoning_effort": "medium",
3368      "supports_attachments": false
3369    },
3370    {
3371      "id": "z-ai/glm-4-32b",
      "name": "Z.ai: GLM 4 32B",
3373      "cost_per_1m_in": 0.1,
3374      "cost_per_1m_out": 0.1,
3375      "cost_per_1m_in_cached": 0,
3376      "cost_per_1m_out_cached": 0,
3377      "context_window": 128000,
3378      "default_max_tokens": 12800,
3379      "can_reason": false,
3380      "supports_attachments": false
3381    },
3382    {
3383      "id": "z-ai/glm-4.5",
3384      "name": "Z.ai: GLM 4.5",
3385      "cost_per_1m_in": 0.6,
3386      "cost_per_1m_out": 2.2,
3387      "cost_per_1m_in_cached": 0,
3388      "cost_per_1m_out_cached": 0.11,
3389      "context_window": 131072,
3390      "default_max_tokens": 48000,
3391      "can_reason": true,
3392      "reasoning_levels": [
3393        "low",
3394        "medium",
3395        "high"
3396      ],
3397      "default_reasoning_effort": "medium",
3398      "supports_attachments": false
3399    },
3400    {
3401      "id": "z-ai/glm-4.5-air",
3402      "name": "Z.ai: GLM 4.5 Air",
3403      "cost_per_1m_in": 0.13,
3404      "cost_per_1m_out": 0.85,
3405      "cost_per_1m_in_cached": 0,
3406      "cost_per_1m_out_cached": 0.025,
3407      "context_window": 131072,
3408      "default_max_tokens": 49152,
3409      "can_reason": true,
3410      "reasoning_levels": [
3411        "low",
3412        "medium",
3413        "high"
3414      ],
3415      "default_reasoning_effort": "medium",
3416      "supports_attachments": false
3417    },
3418    {
3419      "id": "z-ai/glm-4.5-air:free",
3420      "name": "Z.ai: GLM 4.5 Air (free)",
3421      "cost_per_1m_in": 0,
3422      "cost_per_1m_out": 0,
3423      "cost_per_1m_in_cached": 0,
3424      "cost_per_1m_out_cached": 0,
3425      "context_window": 131072,
3426      "default_max_tokens": 48000,
3427      "can_reason": true,
3428      "reasoning_levels": [
3429        "low",
3430        "medium",
3431        "high"
3432      ],
3433      "default_reasoning_effort": "medium",
3434      "supports_attachments": false
3435    },
3436    {
3437      "id": "z-ai/glm-4.5v",
3438      "name": "Z.ai: GLM 4.5V",
3439      "cost_per_1m_in": 0.6,
3440      "cost_per_1m_out": 1.8,
3441      "cost_per_1m_in_cached": 0,
3442      "cost_per_1m_out_cached": 0.11,
3443      "context_window": 65536,
3444      "default_max_tokens": 8192,
3445      "can_reason": true,
3446      "reasoning_levels": [
3447        "low",
3448        "medium",
3449        "high"
3450      ],
3451      "default_reasoning_effort": "medium",
3452      "supports_attachments": true
3453    },
3454    {
3455      "id": "z-ai/glm-4.6",
3456      "name": "Z.ai: GLM 4.6",
3457      "cost_per_1m_in": 0.39,
3458      "cost_per_1m_out": 1.9,
3459      "cost_per_1m_in_cached": 0,
3460      "cost_per_1m_out_cached": 0,
3461      "context_window": 204800,
3462      "default_max_tokens": 102400,
3463      "can_reason": true,
3464      "reasoning_levels": [
3465        "low",
3466        "medium",
3467        "high"
3468      ],
3469      "default_reasoning_effort": "medium",
3470      "supports_attachments": false
3471    },
3472    {
3473      "id": "z-ai/glm-4.6v",
3474      "name": "Z.ai: GLM 4.6V",
3475      "cost_per_1m_in": 0.3,
3476      "cost_per_1m_out": 0.9,
3477      "cost_per_1m_in_cached": 0,
3478      "cost_per_1m_out_cached": 0,
3479      "context_window": 131072,
3480      "default_max_tokens": 65536,
3481      "can_reason": true,
3482      "reasoning_levels": [
3483        "low",
3484        "medium",
3485        "high"
3486      ],
3487      "default_reasoning_effort": "medium",
3488      "supports_attachments": true
3489    },
3490    {
3491      "id": "z-ai/glm-4.7",
3492      "name": "Z.ai: GLM 4.7",
3493      "cost_per_1m_in": 0.45,
3494      "cost_per_1m_out": 2.2,
3495      "cost_per_1m_in_cached": 0,
3496      "cost_per_1m_out_cached": 0.11,
3497      "context_window": 204800,
3498      "default_max_tokens": 102400,
3499      "can_reason": true,
3500      "reasoning_levels": [
3501        "low",
3502        "medium",
3503        "high"
3504      ],
3505      "default_reasoning_effort": "medium",
3506      "supports_attachments": false
3507    },
3508    {
3509      "id": "z-ai/glm-4.7-flash",
3510      "name": "Z.ai: GLM 4.7 Flash",
3511      "cost_per_1m_in": 0.1,
3512      "cost_per_1m_out": 0.43,
3513      "cost_per_1m_in_cached": 0,
3514      "cost_per_1m_out_cached": 0,
3515      "context_window": 202752,
3516      "default_max_tokens": 101376,
3517      "can_reason": true,
3518      "reasoning_levels": [
3519        "low",
3520        "medium",
3521        "high"
3522      ],
3523      "default_reasoning_effort": "medium",
3524      "supports_attachments": false
3525    },
3526    {
3527      "id": "z-ai/glm-5",
3528      "name": "Z.ai: GLM 5",
3529      "cost_per_1m_in": 0.95,
3530      "cost_per_1m_out": 2.55,
3531      "cost_per_1m_in_cached": 0,
3532      "cost_per_1m_out_cached": 0.2,
3533      "context_window": 204800,
3534      "default_max_tokens": 65536,
3535      "can_reason": true,
3536      "reasoning_levels": [
3537        "low",
3538        "medium",
3539        "high"
3540      ],
3541      "default_reasoning_effort": "medium",
3542      "supports_attachments": false
3543    },
3544    {
3545      "id": "z-ai/glm-5-turbo",
3546      "name": "Z.ai: GLM 5 Turbo",
3547      "cost_per_1m_in": 1.2,
3548      "cost_per_1m_out": 4,
3549      "cost_per_1m_in_cached": 0,
3550      "cost_per_1m_out_cached": 0.24,
3551      "context_window": 262144,
3552      "default_max_tokens": 65536,
3553      "can_reason": true,
3554      "reasoning_levels": [
3555        "low",
3556        "medium",
3557        "high"
3558      ],
3559      "default_reasoning_effort": "medium",
3560      "supports_attachments": false
3561    },
3562    {
3563      "id": "z-ai/glm-5.1",
3564      "name": "Z.ai: GLM 5.1",
3565      "cost_per_1m_in": 1.4,
3566      "cost_per_1m_out": 4.4,
3567      "cost_per_1m_in_cached": 0,
3568      "cost_per_1m_out_cached": 0.26,
3569      "context_window": 204800,
3570      "default_max_tokens": 65536,
3571      "can_reason": true,
3572      "reasoning_levels": [
3573        "low",
3574        "medium",
3575        "high"
3576      ],
3577      "default_reasoning_effort": "medium",
3578      "supports_attachments": false
3579    },
3580    {
3581      "id": "z-ai/glm-5v-turbo",
3582      "name": "Z.ai: GLM 5V Turbo",
3583      "cost_per_1m_in": 1.2,
3584      "cost_per_1m_out": 4,
3585      "cost_per_1m_in_cached": 0,
3586      "cost_per_1m_out_cached": 0.24,
3587      "context_window": 202752,
3588      "default_max_tokens": 65536,
3589      "can_reason": true,
3590      "reasoning_levels": [
3591        "low",
3592        "medium",
3593        "high"
3594      ],
3595      "default_reasoning_effort": "medium",
3596      "supports_attachments": true
3597    },
3598    {
3599      "id": "inclusionai/ling-2.6-1t:free",
3600      "name": "inclusionAI: Ling-2.6-1T (free)",
3601      "cost_per_1m_in": 0,
3602      "cost_per_1m_out": 0,
3603      "cost_per_1m_in_cached": 0,
3604      "cost_per_1m_out_cached": 0,
3605      "context_window": 262144,
3606      "default_max_tokens": 16384,
3607      "can_reason": false,
3608      "supports_attachments": false
3609    },
3610    {
3611      "id": "inclusionai/ling-2.6-flash:free",
3612      "name": "inclusionAI: Ling-2.6-flash (free)",
3613      "cost_per_1m_in": 0,
3614      "cost_per_1m_out": 0,
3615      "cost_per_1m_in_cached": 0,
3616      "cost_per_1m_out_cached": 0,
3617      "context_window": 262144,
3618      "default_max_tokens": 16384,
3619      "can_reason": false,
3620      "supports_attachments": false
3621    },
3622    {
3623      "id": "x-ai/grok-3",
3624      "name": "xAI: Grok 3",
3625      "cost_per_1m_in": 3,
3626      "cost_per_1m_out": 15,
3627      "cost_per_1m_in_cached": 0,
3628      "cost_per_1m_out_cached": 0.75,
3629      "context_window": 131072,
3630      "default_max_tokens": 13107,
3631      "can_reason": false,
3632      "supports_attachments": false
3633    },
3634    {
3635      "id": "x-ai/grok-3-beta",
3636      "name": "xAI: Grok 3 Beta",
3637      "cost_per_1m_in": 3,
3638      "cost_per_1m_out": 15,
3639      "cost_per_1m_in_cached": 0,
3640      "cost_per_1m_out_cached": 0.75,
3641      "context_window": 131072,
3642      "default_max_tokens": 13107,
3643      "can_reason": false,
3644      "supports_attachments": false
3645    },
3646    {
3647      "id": "x-ai/grok-3-mini",
3648      "name": "xAI: Grok 3 Mini",
3649      "cost_per_1m_in": 0.3,
3650      "cost_per_1m_out": 0.5,
3651      "cost_per_1m_in_cached": 0,
3652      "cost_per_1m_out_cached": 0.075,
3653      "context_window": 131072,
3654      "default_max_tokens": 13107,
3655      "can_reason": true,
3656      "reasoning_levels": [
3657        "low",
3658        "medium",
3659        "high"
3660      ],
3661      "default_reasoning_effort": "medium",
3662      "supports_attachments": false
3663    },
3664    {
3665      "id": "x-ai/grok-3-mini-beta",
3666      "name": "xAI: Grok 3 Mini Beta",
3667      "cost_per_1m_in": 0.3,
3668      "cost_per_1m_out": 0.5,
3669      "cost_per_1m_in_cached": 0,
3670      "cost_per_1m_out_cached": 0.075,
3671      "context_window": 131072,
3672      "default_max_tokens": 13107,
3673      "can_reason": true,
3674      "reasoning_levels": [
3675        "low",
3676        "medium",
3677        "high"
3678      ],
3679      "default_reasoning_effort": "medium",
3680      "supports_attachments": false
3681    },
3682    {
3683      "id": "x-ai/grok-4",
3684      "name": "xAI: Grok 4",
3685      "cost_per_1m_in": 3,
3686      "cost_per_1m_out": 15,
3687      "cost_per_1m_in_cached": 0,
3688      "cost_per_1m_out_cached": 0.75,
3689      "context_window": 256000,
3690      "default_max_tokens": 25600,
3691      "can_reason": true,
3692      "reasoning_levels": [
3693        "low",
3694        "medium",
3695        "high"
3696      ],
3697      "default_reasoning_effort": "medium",
3698      "supports_attachments": true
3699    },
3700    {
3701      "id": "x-ai/grok-4-fast",
3702      "name": "xAI: Grok 4 Fast",
3703      "cost_per_1m_in": 0.2,
3704      "cost_per_1m_out": 0.5,
3705      "cost_per_1m_in_cached": 0,
3706      "cost_per_1m_out_cached": 0.05,
3707      "context_window": 2000000,
3708      "default_max_tokens": 15000,
3709      "can_reason": true,
3710      "reasoning_levels": [
3711        "low",
3712        "medium",
3713        "high"
3714      ],
3715      "default_reasoning_effort": "medium",
3716      "supports_attachments": true
3717    },
3718    {
3719      "id": "x-ai/grok-4.1-fast",
3720      "name": "xAI: Grok 4.1 Fast",
3721      "cost_per_1m_in": 0.2,
3722      "cost_per_1m_out": 0.5,
3723      "cost_per_1m_in_cached": 0,
3724      "cost_per_1m_out_cached": 0.05,
3725      "context_window": 2000000,
3726      "default_max_tokens": 15000,
3727      "can_reason": true,
3728      "reasoning_levels": [
3729        "low",
3730        "medium",
3731        "high"
3732      ],
3733      "default_reasoning_effort": "medium",
3734      "supports_attachments": true
3735    },
3736    {
3737      "id": "x-ai/grok-4.20",
3738      "name": "xAI: Grok 4.20",
3739      "cost_per_1m_in": 2,
3740      "cost_per_1m_out": 6,
3741      "cost_per_1m_in_cached": 0,
3742      "cost_per_1m_out_cached": 0.2,
3743      "context_window": 2000000,
3744      "default_max_tokens": 200000,
3745      "can_reason": true,
3746      "reasoning_levels": [
3747        "low",
3748        "medium",
3749        "high"
3750      ],
3751      "default_reasoning_effort": "medium",
3752      "supports_attachments": true
3753    },
3754    {
3755      "id": "x-ai/grok-code-fast-1",
3756      "name": "xAI: Grok Code Fast 1",
3757      "cost_per_1m_in": 0.2,
3758      "cost_per_1m_out": 1.5,
3759      "cost_per_1m_in_cached": 0,
3760      "cost_per_1m_out_cached": 0.02,
3761      "context_window": 256000,
3762      "default_max_tokens": 5000,
3763      "can_reason": true,
3764      "reasoning_levels": [
3765        "low",
3766        "medium",
3767        "high"
3768      ],
3769      "default_reasoning_effort": "medium",
3770      "supports_attachments": false
3771    }
3772  ],
3773  "default_headers": {
3774    "HTTP-Referer": "https://charm.land",
3775    "X-Title": "Crush"
3776  }
3777}