openrouter.json

   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false
  21    },
  22    {
  23      "id": "amazon/nova-2-lite-v1",
  24      "name": "Amazon: Nova 2 Lite",
  25      "cost_per_1m_in": 0.3,
  26      "cost_per_1m_out": 2.5,
  27      "cost_per_1m_in_cached": 0,
  28      "cost_per_1m_out_cached": 0,
  29      "context_window": 1000000,
  30      "default_max_tokens": 32767,
  31      "can_reason": true,
  32      "reasoning_levels": [
  33        "low",
  34        "medium",
  35        "high"
  36      ],
  37      "default_reasoning_effort": "medium",
  38      "supports_attachments": true
  39    },
  40    {
  41      "id": "amazon/nova-lite-v1",
  42      "name": "Amazon: Nova Lite 1.0",
  43      "cost_per_1m_in": 0.06,
  44      "cost_per_1m_out": 0.24,
  45      "cost_per_1m_in_cached": 0,
  46      "cost_per_1m_out_cached": 0,
  47      "context_window": 300000,
  48      "default_max_tokens": 2560,
  49      "can_reason": false,
  50      "supports_attachments": true
  51    },
  52    {
  53      "id": "amazon/nova-micro-v1",
  54      "name": "Amazon: Nova Micro 1.0",
  55      "cost_per_1m_in": 0.035,
  56      "cost_per_1m_out": 0.14,
  57      "cost_per_1m_in_cached": 0,
  58      "cost_per_1m_out_cached": 0,
  59      "context_window": 128000,
  60      "default_max_tokens": 2560,
  61      "can_reason": false,
  62      "supports_attachments": false
  63    },
  64    {
  65      "id": "amazon/nova-premier-v1",
  66      "name": "Amazon: Nova Premier 1.0",
  67      "cost_per_1m_in": 2.5,
  68      "cost_per_1m_out": 12.5,
  69      "cost_per_1m_in_cached": 0,
  70      "cost_per_1m_out_cached": 0.625,
  71      "context_window": 1000000,
  72      "default_max_tokens": 16000,
  73      "can_reason": false,
  74      "supports_attachments": true
  75    },
  76    {
  77      "id": "amazon/nova-pro-v1",
  78      "name": "Amazon: Nova Pro 1.0",
  79      "cost_per_1m_in": 0.8,
  80      "cost_per_1m_out": 3.2,
  81      "cost_per_1m_in_cached": 0,
  82      "cost_per_1m_out_cached": 0,
  83      "context_window": 300000,
  84      "default_max_tokens": 2560,
  85      "can_reason": false,
  86      "supports_attachments": true
  87    },
  88    {
  89      "id": "anthropic/claude-3-haiku",
  90      "name": "Anthropic: Claude 3 Haiku",
  91      "cost_per_1m_in": 0.25,
  92      "cost_per_1m_out": 1.25,
  93      "cost_per_1m_in_cached": 0.3,
  94      "cost_per_1m_out_cached": 0.03,
  95      "context_window": 200000,
  96      "default_max_tokens": 2048,
  97      "can_reason": false,
  98      "supports_attachments": true
  99    },
 100    {
 101      "id": "anthropic/claude-3.5-haiku",
 102      "name": "Anthropic: Claude 3.5 Haiku",
 103      "cost_per_1m_in": 0.8,
 104      "cost_per_1m_out": 4,
 105      "cost_per_1m_in_cached": 1,
 106      "cost_per_1m_out_cached": 0.08,
 107      "context_window": 200000,
 108      "default_max_tokens": 4096,
 109      "can_reason": false,
 110      "supports_attachments": true
 111    },
 112    {
 113      "id": "anthropic/claude-haiku-4.5",
 114      "name": "Anthropic: Claude Haiku 4.5",
 115      "cost_per_1m_in": 1,
 116      "cost_per_1m_out": 5,
 117      "cost_per_1m_in_cached": 1.25,
 118      "cost_per_1m_out_cached": 0.1,
 119      "context_window": 200000,
 120      "default_max_tokens": 32000,
 121      "can_reason": true,
 122      "reasoning_levels": [
 123        "low",
 124        "medium",
 125        "high"
 126      ],
 127      "default_reasoning_effort": "medium",
 128      "supports_attachments": true
 129    },
 130    {
 131      "id": "anthropic/claude-opus-4",
 132      "name": "Anthropic: Claude Opus 4",
 133      "cost_per_1m_in": 15,
 134      "cost_per_1m_out": 75,
 135      "cost_per_1m_in_cached": 18.75,
 136      "cost_per_1m_out_cached": 1.5,
 137      "context_window": 200000,
 138      "default_max_tokens": 16000,
 139      "can_reason": true,
 140      "reasoning_levels": [
 141        "low",
 142        "medium",
 143        "high"
 144      ],
 145      "default_reasoning_effort": "medium",
 146      "supports_attachments": true
 147    },
 148    {
 149      "id": "anthropic/claude-opus-4.1",
 150      "name": "Anthropic: Claude Opus 4.1",
 151      "cost_per_1m_in": 15,
 152      "cost_per_1m_out": 75,
 153      "cost_per_1m_in_cached": 18.75,
 154      "cost_per_1m_out_cached": 1.5,
 155      "context_window": 200000,
 156      "default_max_tokens": 16000,
 157      "can_reason": true,
 158      "reasoning_levels": [
 159        "low",
 160        "medium",
 161        "high"
 162      ],
 163      "default_reasoning_effort": "medium",
 164      "supports_attachments": true
 165    },
 166    {
 167      "id": "anthropic/claude-opus-4.5",
 168      "name": "Anthropic: Claude Opus 4.5",
 169      "cost_per_1m_in": 5,
 170      "cost_per_1m_out": 25,
 171      "cost_per_1m_in_cached": 6.25,
 172      "cost_per_1m_out_cached": 0.5,
 173      "context_window": 200000,
 174      "default_max_tokens": 32000,
 175      "can_reason": true,
 176      "reasoning_levels": [
 177        "low",
 178        "medium",
 179        "high"
 180      ],
 181      "default_reasoning_effort": "medium",
 182      "supports_attachments": true
 183    },
 184    {
 185      "id": "anthropic/claude-opus-4.6",
 186      "name": "Anthropic: Claude Opus 4.6",
 187      "cost_per_1m_in": 5,
 188      "cost_per_1m_out": 25,
 189      "cost_per_1m_in_cached": 6.25,
 190      "cost_per_1m_out_cached": 0.5,
 191      "context_window": 1000000,
 192      "default_max_tokens": 64000,
 193      "can_reason": true,
 194      "reasoning_levels": [
 195        "low",
 196        "medium",
 197        "high"
 198      ],
 199      "default_reasoning_effort": "medium",
 200      "supports_attachments": true
 201    },
 202    {
 203      "id": "anthropic/claude-opus-4.6-fast",
 204      "name": "Anthropic: Claude Opus 4.6 (Fast)",
 205      "cost_per_1m_in": 30,
 206      "cost_per_1m_out": 150,
 207      "cost_per_1m_in_cached": 37.5,
 208      "cost_per_1m_out_cached": 3,
 209      "context_window": 1000000,
 210      "default_max_tokens": 64000,
 211      "can_reason": true,
 212      "reasoning_levels": [
 213        "low",
 214        "medium",
 215        "high"
 216      ],
 217      "default_reasoning_effort": "medium",
 218      "supports_attachments": true
 219    },
 220    {
 221      "id": "anthropic/claude-opus-4.7",
 222      "name": "Anthropic: Claude Opus 4.7",
 223      "cost_per_1m_in": 5,
 224      "cost_per_1m_out": 25,
 225      "cost_per_1m_in_cached": 6.25,
 226      "cost_per_1m_out_cached": 0.5,
 227      "context_window": 1000000,
 228      "default_max_tokens": 64000,
 229      "can_reason": true,
 230      "reasoning_levels": [
 231        "low",
 232        "medium",
 233        "high"
 234      ],
 235      "default_reasoning_effort": "medium",
 236      "supports_attachments": true
 237    },
 238    {
 239      "id": "anthropic/claude-sonnet-4",
 240      "name": "Anthropic: Claude Sonnet 4",
 241      "cost_per_1m_in": 3,
 242      "cost_per_1m_out": 15,
 243      "cost_per_1m_in_cached": 3.75,
 244      "cost_per_1m_out_cached": 0.3,
 245      "context_window": 1000000,
 246      "default_max_tokens": 32000,
 247      "can_reason": true,
 248      "reasoning_levels": [
 249        "low",
 250        "medium",
 251        "high"
 252      ],
 253      "default_reasoning_effort": "medium",
 254      "supports_attachments": true
 255    },
 256    {
 257      "id": "anthropic/claude-sonnet-4.5",
 258      "name": "Anthropic: Claude Sonnet 4.5",
 259      "cost_per_1m_in": 3,
 260      "cost_per_1m_out": 15,
 261      "cost_per_1m_in_cached": 3.75,
 262      "cost_per_1m_out_cached": 0.3,
 263      "context_window": 1000000,
 264      "default_max_tokens": 32000,
 265      "can_reason": true,
 266      "reasoning_levels": [
 267        "low",
 268        "medium",
 269        "high"
 270      ],
 271      "default_reasoning_effort": "medium",
 272      "supports_attachments": true
 273    },
 274    {
 275      "id": "anthropic/claude-sonnet-4.6",
 276      "name": "Anthropic: Claude Sonnet 4.6",
 277      "cost_per_1m_in": 3,
 278      "cost_per_1m_out": 15,
 279      "cost_per_1m_in_cached": 3.75,
 280      "cost_per_1m_out_cached": 0.3,
 281      "context_window": 1000000,
 282      "default_max_tokens": 64000,
 283      "can_reason": true,
 284      "reasoning_levels": [
 285        "low",
 286        "medium",
 287        "high"
 288      ],
 289      "default_reasoning_effort": "medium",
 290      "supports_attachments": true
 291    },
 292    {
 293      "id": "arcee-ai/trinity-large-preview",
 294      "name": "Arcee AI: Trinity Large Preview",
 295      "cost_per_1m_in": 0.15,
 296      "cost_per_1m_out": 0.45,
 297      "cost_per_1m_in_cached": 0,
 298      "cost_per_1m_out_cached": 0,
 299      "context_window": 131000,
 300      "default_max_tokens": 13100,
 301      "can_reason": false,
 302      "supports_attachments": false
 303    },
 304    {
 305      "id": "arcee-ai/trinity-large-thinking",
 306      "name": "Arcee AI: Trinity Large Thinking",
 307      "cost_per_1m_in": 0.22,
 308      "cost_per_1m_out": 0.85,
 309      "cost_per_1m_in_cached": 0,
 310      "cost_per_1m_out_cached": 0.06,
 311      "context_window": 262144,
 312      "default_max_tokens": 131072,
 313      "can_reason": true,
 314      "reasoning_levels": [
 315        "low",
 316        "medium",
 317        "high"
 318      ],
 319      "default_reasoning_effort": "medium",
 320      "supports_attachments": false
 321    },
 322    {
 323      "id": "arcee-ai/trinity-large-thinking:free",
 324      "name": "Arcee AI: Trinity Large Thinking (free)",
 325      "cost_per_1m_in": 0,
 326      "cost_per_1m_out": 0,
 327      "cost_per_1m_in_cached": 0,
 328      "cost_per_1m_out_cached": 0,
 329      "context_window": 262144,
 330      "default_max_tokens": 40000,
 331      "can_reason": true,
 332      "reasoning_levels": [
 333        "low",
 334        "medium",
 335        "high"
 336      ],
 337      "default_reasoning_effort": "medium",
 338      "supports_attachments": false
 339    },
 340    {
 341      "id": "arcee-ai/trinity-mini",
 342      "name": "Arcee AI: Trinity Mini",
 343      "cost_per_1m_in": 0.045,
 344      "cost_per_1m_out": 0.15,
 345      "cost_per_1m_in_cached": 0,
 346      "cost_per_1m_out_cached": 0,
 347      "context_window": 131072,
 348      "default_max_tokens": 65536,
 349      "can_reason": true,
 350      "reasoning_levels": [
 351        "low",
 352        "medium",
 353        "high"
 354      ],
 355      "default_reasoning_effort": "medium",
 356      "supports_attachments": false
 357    },
 358    {
 359      "id": "arcee-ai/virtuoso-large",
 360      "name": "Arcee AI: Virtuoso Large",
 361      "cost_per_1m_in": 0.75,
 362      "cost_per_1m_out": 1.2,
 363      "cost_per_1m_in_cached": 0,
 364      "cost_per_1m_out_cached": 0,
 365      "context_window": 131072,
 366      "default_max_tokens": 32000,
 367      "can_reason": false,
 368      "supports_attachments": false
 369    },
 370    {
 371      "id": "baidu/cobuddy:free",
 372      "name": "Baidu Qianfan: CoBuddy (free)",
 373      "cost_per_1m_in": 0,
 374      "cost_per_1m_out": 0,
 375      "cost_per_1m_in_cached": 0,
 376      "cost_per_1m_out_cached": 0,
 377      "context_window": 131072,
 378      "default_max_tokens": 32768,
 379      "can_reason": true,
 380      "reasoning_levels": [
 381        "low",
 382        "medium",
 383        "high"
 384      ],
 385      "default_reasoning_effort": "medium",
 386      "supports_attachments": false
 387    },
 388    {
 389      "id": "baidu/ernie-4.5-21b-a3b",
 390      "name": "Baidu: ERNIE 4.5 21B A3B",
 391      "cost_per_1m_in": 0.07,
 392      "cost_per_1m_out": 0.28,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 120000,
 396      "default_max_tokens": 4000,
 397      "can_reason": false,
 398      "supports_attachments": false
 399    },
 400    {
 401      "id": "baidu/ernie-4.5-vl-28b-a3b",
 402      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 403      "cost_per_1m_in": 0.14,
 404      "cost_per_1m_out": 0.56,
 405      "cost_per_1m_in_cached": 0,
 406      "cost_per_1m_out_cached": 0,
 407      "context_window": 30000,
 408      "default_max_tokens": 4000,
 409      "can_reason": true,
 410      "reasoning_levels": [
 411        "low",
 412        "medium",
 413        "high"
 414      ],
 415      "default_reasoning_effort": "medium",
 416      "supports_attachments": true
 417    },
 418    {
 419      "id": "bytedance-seed/seed-1.6",
 420      "name": "ByteDance Seed: Seed 1.6",
 421      "cost_per_1m_in": 0.25,
 422      "cost_per_1m_out": 2,
 423      "cost_per_1m_in_cached": 0,
 424      "cost_per_1m_out_cached": 0,
 425      "context_window": 262144,
 426      "default_max_tokens": 16384,
 427      "can_reason": true,
 428      "reasoning_levels": [
 429        "low",
 430        "medium",
 431        "high"
 432      ],
 433      "default_reasoning_effort": "medium",
 434      "supports_attachments": true
 435    },
 436    {
 437      "id": "bytedance-seed/seed-1.6-flash",
 438      "name": "ByteDance Seed: Seed 1.6 Flash",
 439      "cost_per_1m_in": 0.075,
 440      "cost_per_1m_out": 0.3,
 441      "cost_per_1m_in_cached": 0,
 442      "cost_per_1m_out_cached": 0,
 443      "context_window": 262144,
 444      "default_max_tokens": 16384,
 445      "can_reason": true,
 446      "reasoning_levels": [
 447        "low",
 448        "medium",
 449        "high"
 450      ],
 451      "default_reasoning_effort": "medium",
 452      "supports_attachments": true
 453    },
 454    {
 455      "id": "bytedance-seed/seed-2.0-lite",
 456      "name": "ByteDance Seed: Seed-2.0-Lite",
 457      "cost_per_1m_in": 0.25,
 458      "cost_per_1m_out": 2,
 459      "cost_per_1m_in_cached": 0,
 460      "cost_per_1m_out_cached": 0,
 461      "context_window": 262144,
 462      "default_max_tokens": 65536,
 463      "can_reason": true,
 464      "reasoning_levels": [
 465        "low",
 466        "medium",
 467        "high"
 468      ],
 469      "default_reasoning_effort": "medium",
 470      "supports_attachments": true
 471    },
 472    {
 473      "id": "bytedance-seed/seed-2.0-mini",
 474      "name": "ByteDance Seed: Seed-2.0-Mini",
 475      "cost_per_1m_in": 0.1,
 476      "cost_per_1m_out": 0.4,
 477      "cost_per_1m_in_cached": 0,
 478      "cost_per_1m_out_cached": 0,
 479      "context_window": 262144,
 480      "default_max_tokens": 65536,
 481      "can_reason": true,
 482      "reasoning_levels": [
 483        "low",
 484        "medium",
 485        "high"
 486      ],
 487      "default_reasoning_effort": "medium",
 488      "supports_attachments": true
 489    },
 490    {
 491      "id": "cohere/command-r-08-2024",
 492      "name": "Cohere: Command R (08-2024)",
 493      "cost_per_1m_in": 0.15,
 494      "cost_per_1m_out": 0.6,
 495      "cost_per_1m_in_cached": 0,
 496      "cost_per_1m_out_cached": 0,
 497      "context_window": 128000,
 498      "default_max_tokens": 2000,
 499      "can_reason": false,
 500      "supports_attachments": false
 501    },
 502    {
 503      "id": "cohere/command-r-plus-08-2024",
 504      "name": "Cohere: Command R+ (08-2024)",
 505      "cost_per_1m_in": 2.5,
 506      "cost_per_1m_out": 10,
 507      "cost_per_1m_in_cached": 0,
 508      "cost_per_1m_out_cached": 0,
 509      "context_window": 128000,
 510      "default_max_tokens": 2000,
 511      "can_reason": false,
 512      "supports_attachments": false
 513    },
 514    {
 515      "id": "deepseek/deepseek-chat",
 516      "name": "DeepSeek: DeepSeek V3",
 517      "cost_per_1m_in": 0.4,
 518      "cost_per_1m_out": 1.3,
 519      "cost_per_1m_in_cached": 0,
 520      "cost_per_1m_out_cached": 0,
 521      "context_window": 64000,
 522      "default_max_tokens": 8000,
 523      "can_reason": false,
 524      "supports_attachments": false
 525    },
 526    {
 527      "id": "deepseek/deepseek-chat-v3-0324",
 528      "name": "DeepSeek: DeepSeek V3 0324",
 529      "cost_per_1m_in": 0.27,
 530      "cost_per_1m_out": 1.12,
 531      "cost_per_1m_in_cached": 0,
 532      "cost_per_1m_out_cached": 0.135,
 533      "context_window": 163840,
 534      "default_max_tokens": 32768,
 535      "can_reason": false,
 536      "supports_attachments": false
 537    },
 538    {
 539      "id": "deepseek/deepseek-chat-v3.1",
 540      "name": "DeepSeek: DeepSeek V3.1",
 541      "cost_per_1m_in": 0.21,
 542      "cost_per_1m_out": 0.79,
 543      "cost_per_1m_in_cached": 0,
 544      "cost_per_1m_out_cached": 0.13,
 545      "context_window": 163840,
 546      "default_max_tokens": 16384,
 547      "can_reason": true,
 548      "reasoning_levels": [
 549        "low",
 550        "medium",
 551        "high"
 552      ],
 553      "default_reasoning_effort": "medium",
 554      "supports_attachments": false
 555    },
 556    {
 557      "id": "deepseek/deepseek-v3.1-terminus",
 558      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 559      "cost_per_1m_in": 0.27,
 560      "cost_per_1m_out": 0.95,
 561      "cost_per_1m_in_cached": 0,
 562      "cost_per_1m_out_cached": 0.13,
 563      "context_window": 163840,
 564      "default_max_tokens": 16384,
 565      "can_reason": true,
 566      "reasoning_levels": [
 567        "low",
 568        "medium",
 569        "high"
 570      ],
 571      "default_reasoning_effort": "medium",
 572      "supports_attachments": false
 573    },
 574    {
 575      "id": "deepseek/deepseek-v3.2",
 576      "name": "DeepSeek: DeepSeek V3.2",
 577      "cost_per_1m_in": 0.26,
 578      "cost_per_1m_out": 0.38,
 579      "cost_per_1m_in_cached": 0,
 580      "cost_per_1m_out_cached": 0.13,
 581      "context_window": 163840,
 582      "default_max_tokens": 81920,
 583      "can_reason": true,
 584      "reasoning_levels": [
 585        "low",
 586        "medium",
 587        "high"
 588      ],
 589      "default_reasoning_effort": "medium",
 590      "supports_attachments": false
 591    },
 592    {
 593      "id": "deepseek/deepseek-v3.2-exp",
 594      "name": "DeepSeek: DeepSeek V3.2 Exp",
 595      "cost_per_1m_in": 0.27,
 596      "cost_per_1m_out": 0.41,
 597      "cost_per_1m_in_cached": 0,
 598      "cost_per_1m_out_cached": 0,
 599      "context_window": 163840,
 600      "default_max_tokens": 32768,
 601      "can_reason": true,
 602      "reasoning_levels": [
 603        "low",
 604        "medium",
 605        "high"
 606      ],
 607      "default_reasoning_effort": "medium",
 608      "supports_attachments": false
 609    },
 610    {
 611      "id": "deepseek/deepseek-v4-flash",
 612      "name": "DeepSeek: DeepSeek V4 Flash",
 613      "cost_per_1m_in": 0.14,
 614      "cost_per_1m_out": 0.28,
 615      "cost_per_1m_in_cached": 0,
 616      "cost_per_1m_out_cached": 0.0028,
 617      "context_window": 1048576,
 618      "default_max_tokens": 192000,
 619      "can_reason": true,
 620      "reasoning_levels": [
 621        "low",
 622        "medium",
 623        "high"
 624      ],
 625      "default_reasoning_effort": "medium",
 626      "supports_attachments": false
 627    },
 628    {
 629      "id": "deepseek/deepseek-v4-pro",
 630      "name": "DeepSeek: DeepSeek V4 Pro",
 631      "cost_per_1m_in": 0.435,
 632      "cost_per_1m_out": 0.87,
 633      "cost_per_1m_in_cached": 0,
 634      "cost_per_1m_out_cached": 0.00363,
 635      "context_window": 1048576,
 636      "default_max_tokens": 192000,
 637      "can_reason": true,
 638      "reasoning_levels": [
 639        "low",
 640        "medium",
 641        "high"
 642      ],
 643      "default_reasoning_effort": "medium",
 644      "supports_attachments": false
 645    },
 646    {
 647      "id": "deepseek/deepseek-r1",
 648      "name": "DeepSeek: R1",
 649      "cost_per_1m_in": 0.7,
 650      "cost_per_1m_out": 2.5,
 651      "cost_per_1m_in_cached": 0,
 652      "cost_per_1m_out_cached": 0,
 653      "context_window": 64000,
 654      "default_max_tokens": 8000,
 655      "can_reason": true,
 656      "reasoning_levels": [
 657        "low",
 658        "medium",
 659        "high"
 660      ],
 661      "default_reasoning_effort": "medium",
 662      "supports_attachments": false
 663    },
 664    {
 665      "id": "deepseek/deepseek-r1-0528",
 666      "name": "DeepSeek: R1 0528",
 667      "cost_per_1m_in": 0.5,
 668      "cost_per_1m_out": 2.18,
 669      "cost_per_1m_in_cached": 0,
 670      "cost_per_1m_out_cached": 0,
 671      "context_window": 163840,
 672      "default_max_tokens": 81920,
 673      "can_reason": true,
 674      "reasoning_levels": [
 675        "low",
 676        "medium",
 677        "high"
 678      ],
 679      "default_reasoning_effort": "medium",
 680      "supports_attachments": false
 681    },
 682    {
 683      "id": "essentialai/rnj-1-instruct",
 684      "name": "EssentialAI: Rnj 1 Instruct",
 685      "cost_per_1m_in": 0.15,
 686      "cost_per_1m_out": 0.15,
 687      "cost_per_1m_in_cached": 0,
 688      "cost_per_1m_out_cached": 0,
 689      "context_window": 32768,
 690      "default_max_tokens": 3276,
 691      "can_reason": false,
 692      "supports_attachments": false
 693    },
 694    {
 695      "id": "google/gemini-2.0-flash-001",
 696      "name": "Google: Gemini 2.0 Flash",
 697      "cost_per_1m_in": 0.1,
 698      "cost_per_1m_out": 0.4,
 699      "cost_per_1m_in_cached": 0.08333,
 700      "cost_per_1m_out_cached": 0.025,
 701      "context_window": 1048576,
 702      "default_max_tokens": 4096,
 703      "can_reason": false,
 704      "supports_attachments": true
 705    },
 706    {
 707      "id": "google/gemini-2.0-flash-lite-001",
 708      "name": "Google: Gemini 2.0 Flash Lite",
 709      "cost_per_1m_in": 0.075,
 710      "cost_per_1m_out": 0.3,
 711      "cost_per_1m_in_cached": 0,
 712      "cost_per_1m_out_cached": 0,
 713      "context_window": 1048576,
 714      "default_max_tokens": 4096,
 715      "can_reason": false,
 716      "supports_attachments": true
 717    },
 718    {
 719      "id": "google/gemini-2.5-flash",
 720      "name": "Google: Gemini 2.5 Flash",
 721      "cost_per_1m_in": 0.3,
 722      "cost_per_1m_out": 2.5,
 723      "cost_per_1m_in_cached": 0.08333,
 724      "cost_per_1m_out_cached": 0.03,
 725      "context_window": 1048576,
 726      "default_max_tokens": 32767,
 727      "can_reason": true,
 728      "reasoning_levels": [
 729        "low",
 730        "medium",
 731        "high"
 732      ],
 733      "default_reasoning_effort": "medium",
 734      "supports_attachments": true
 735    },
 736    {
 737      "id": "google/gemini-2.5-flash-lite",
 738      "name": "Google: Gemini 2.5 Flash Lite",
 739      "cost_per_1m_in": 0.1,
 740      "cost_per_1m_out": 0.4,
 741      "cost_per_1m_in_cached": 0.08333,
 742      "cost_per_1m_out_cached": 0.01,
 743      "context_window": 1048576,
 744      "default_max_tokens": 32767,
 745      "can_reason": true,
 746      "reasoning_levels": [
 747        "low",
 748        "medium",
 749        "high"
 750      ],
 751      "default_reasoning_effort": "medium",
 752      "supports_attachments": true
 753    },
 754    {
 755      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 756      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 757      "cost_per_1m_in": 0.1,
 758      "cost_per_1m_out": 0.4,
 759      "cost_per_1m_in_cached": 0.08333,
 760      "cost_per_1m_out_cached": 0.01,
 761      "context_window": 1048576,
 762      "default_max_tokens": 32767,
 763      "can_reason": true,
 764      "reasoning_levels": [
 765        "low",
 766        "medium",
 767        "high"
 768      ],
 769      "default_reasoning_effort": "medium",
 770      "supports_attachments": true
 771    },
 772    {
 773      "id": "google/gemini-2.5-pro",
 774      "name": "Google: Gemini 2.5 Pro",
 775      "cost_per_1m_in": 1.25,
 776      "cost_per_1m_out": 10,
 777      "cost_per_1m_in_cached": 0.375,
 778      "cost_per_1m_out_cached": 0.125,
 779      "context_window": 1048576,
 780      "default_max_tokens": 32768,
 781      "can_reason": true,
 782      "reasoning_levels": [
 783        "low",
 784        "medium",
 785        "high"
 786      ],
 787      "default_reasoning_effort": "medium",
 788      "supports_attachments": true
 789    },
 790    {
 791      "id": "google/gemini-2.5-pro-preview-05-06",
 792      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 793      "cost_per_1m_in": 1.25,
 794      "cost_per_1m_out": 10,
 795      "cost_per_1m_in_cached": 0.375,
 796      "cost_per_1m_out_cached": 0.125,
 797      "context_window": 1048576,
 798      "default_max_tokens": 32768,
 799      "can_reason": true,
 800      "reasoning_levels": [
 801        "low",
 802        "medium",
 803        "high"
 804      ],
 805      "default_reasoning_effort": "medium",
 806      "supports_attachments": true
 807    },
 808    {
 809      "id": "google/gemini-2.5-pro-preview",
 810      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 811      "cost_per_1m_in": 1.25,
 812      "cost_per_1m_out": 10,
 813      "cost_per_1m_in_cached": 0.375,
 814      "cost_per_1m_out_cached": 0.125,
 815      "context_window": 1048576,
 816      "default_max_tokens": 32768,
 817      "can_reason": true,
 818      "reasoning_levels": [
 819        "low",
 820        "medium",
 821        "high"
 822      ],
 823      "default_reasoning_effort": "medium",
 824      "supports_attachments": true
 825    },
 826    {
 827      "id": "google/gemini-3-flash-preview",
 828      "name": "Google: Gemini 3 Flash Preview",
 829      "cost_per_1m_in": 0.5,
 830      "cost_per_1m_out": 3,
 831      "cost_per_1m_in_cached": 0.08333,
 832      "cost_per_1m_out_cached": 0.05,
 833      "context_window": 1048576,
 834      "default_max_tokens": 32768,
 835      "can_reason": true,
 836      "reasoning_levels": [
 837        "low",
 838        "medium",
 839        "high"
 840      ],
 841      "default_reasoning_effort": "medium",
 842      "supports_attachments": true
 843    },
 844    {
 845      "id": "google/gemini-3.1-flash-lite",
 846      "name": "Google: Gemini 3.1 Flash Lite",
 847      "cost_per_1m_in": 0.25,
 848      "cost_per_1m_out": 1.5,
 849      "cost_per_1m_in_cached": 0.08333,
 850      "cost_per_1m_out_cached": 0.025,
 851      "context_window": 1048576,
 852      "default_max_tokens": 32768,
 853      "can_reason": true,
 854      "reasoning_levels": [
 855        "low",
 856        "medium",
 857        "high"
 858      ],
 859      "default_reasoning_effort": "medium",
 860      "supports_attachments": true
 861    },
 862    {
 863      "id": "google/gemini-3.1-flash-lite-preview",
 864      "name": "Google: Gemini 3.1 Flash Lite Preview",
 865      "cost_per_1m_in": 0.25,
 866      "cost_per_1m_out": 1.5,
 867      "cost_per_1m_in_cached": 0.08333,
 868      "cost_per_1m_out_cached": 0.025,
 869      "context_window": 1048576,
 870      "default_max_tokens": 32768,
 871      "can_reason": true,
 872      "reasoning_levels": [
 873        "low",
 874        "medium",
 875        "high"
 876      ],
 877      "default_reasoning_effort": "medium",
 878      "supports_attachments": true
 879    },
 880    {
 881      "id": "google/gemini-3.1-pro-preview",
 882      "name": "Google: Gemini 3.1 Pro Preview",
 883      "cost_per_1m_in": 2,
 884      "cost_per_1m_out": 12,
 885      "cost_per_1m_in_cached": 0.375,
 886      "cost_per_1m_out_cached": 0.2,
 887      "context_window": 1048576,
 888      "default_max_tokens": 32768,
 889      "can_reason": true,
 890      "reasoning_levels": [
 891        "low",
 892        "medium",
 893        "high"
 894      ],
 895      "default_reasoning_effort": "medium",
 896      "supports_attachments": true
 897    },
 898    {
 899      "id": "google/gemini-3.1-pro-preview-customtools",
 900      "name": "Google: Gemini 3.1 Pro Preview Custom Tools",
 901      "cost_per_1m_in": 2,
 902      "cost_per_1m_out": 12,
 903      "cost_per_1m_in_cached": 0.375,
 904      "cost_per_1m_out_cached": 0.2,
 905      "context_window": 1048576,
 906      "default_max_tokens": 32768,
 907      "can_reason": true,
 908      "reasoning_levels": [
 909        "low",
 910        "medium",
 911        "high"
 912      ],
 913      "default_reasoning_effort": "medium",
 914      "supports_attachments": true
 915    },
 916    {
 917      "id": "google/gemma-3-12b-it",
 918      "name": "Google: Gemma 3 12B",
 919      "cost_per_1m_in": 0.04,
 920      "cost_per_1m_out": 0.13,
 921      "cost_per_1m_in_cached": 0,
 922      "cost_per_1m_out_cached": 0,
 923      "context_window": 131072,
 924      "default_max_tokens": 8192,
 925      "can_reason": false,
 926      "supports_attachments": true
 927    },
 928    {
 929      "id": "google/gemma-3-27b-it",
 930      "name": "Google: Gemma 3 27B",
 931      "cost_per_1m_in": 0.08,
 932      "cost_per_1m_out": 0.16,
 933      "cost_per_1m_in_cached": 0,
 934      "cost_per_1m_out_cached": 0,
 935      "context_window": 131072,
 936      "default_max_tokens": 8192,
 937      "can_reason": false,
 938      "supports_attachments": true
 939    },
 940    {
 941      "id": "google/gemma-4-26b-a4b-it",
  942      "name": "Google: Gemma 4 26B A4B",
 943      "cost_per_1m_in": 0.15,
 944      "cost_per_1m_out": 0.6,
 945      "cost_per_1m_in_cached": 0,
 946      "cost_per_1m_out_cached": 0,
 947      "context_window": 262144,
 948      "default_max_tokens": 131072,
 949      "can_reason": true,
 950      "reasoning_levels": [
 951        "low",
 952        "medium",
 953        "high"
 954      ],
 955      "default_reasoning_effort": "medium",
 956      "supports_attachments": true
 957    },
 958    {
 959      "id": "google/gemma-4-26b-a4b-it:free",
  960      "name": "Google: Gemma 4 26B A4B (free)",
 961      "cost_per_1m_in": 0,
 962      "cost_per_1m_out": 0,
 963      "cost_per_1m_in_cached": 0,
 964      "cost_per_1m_out_cached": 0,
 965      "context_window": 262144,
 966      "default_max_tokens": 16384,
 967      "can_reason": true,
 968      "reasoning_levels": [
 969        "low",
 970        "medium",
 971        "high"
 972      ],
 973      "default_reasoning_effort": "medium",
 974      "supports_attachments": true
 975    },
 976    {
 977      "id": "google/gemma-4-31b-it",
 978      "name": "Google: Gemma 4 31B",
 979      "cost_per_1m_in": 0.13,
 980      "cost_per_1m_out": 0.38,
 981      "cost_per_1m_in_cached": 0,
 982      "cost_per_1m_out_cached": 0,
 983      "context_window": 262144,
 984      "default_max_tokens": 8192,
 985      "can_reason": true,
 986      "reasoning_levels": [
 987        "low",
 988        "medium",
 989        "high"
 990      ],
 991      "default_reasoning_effort": "medium",
 992      "supports_attachments": true
 993    },
 994    {
 995      "id": "google/gemma-4-31b-it:free",
 996      "name": "Google: Gemma 4 31B (free)",
 997      "cost_per_1m_in": 0,
 998      "cost_per_1m_out": 0,
 999      "cost_per_1m_in_cached": 0,
1000      "cost_per_1m_out_cached": 0,
1001      "context_window": 262144,
1002      "default_max_tokens": 16384,
1003      "can_reason": true,
1004      "reasoning_levels": [
1005        "low",
1006        "medium",
1007        "high"
1008      ],
1009      "default_reasoning_effort": "medium",
1010      "supports_attachments": true
1011    },
1012    {
1013      "id": "ibm-granite/granite-4.1-8b",
1014      "name": "IBM: Granite 4.1 8B",
1015      "cost_per_1m_in": 0.05,
1016      "cost_per_1m_out": 0.1,
1017      "cost_per_1m_in_cached": 0,
1018      "cost_per_1m_out_cached": 0.05,
1019      "context_window": 131072,
1020      "default_max_tokens": 65536,
1021      "can_reason": false,
1022      "supports_attachments": false
1023    },
1024    {
1025      "id": "inception/mercury-2",
1026      "name": "Inception: Mercury 2",
1027      "cost_per_1m_in": 0.25,
1028      "cost_per_1m_out": 0.75,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0.025,
1031      "context_window": 128000,
1032      "default_max_tokens": 25000,
1033      "can_reason": true,
1034      "reasoning_levels": [
1035        "low",
1036        "medium",
1037        "high"
1038      ],
1039      "default_reasoning_effort": "medium",
1040      "supports_attachments": false
1041    },
1042    {
1043      "id": "kwaipilot/kat-coder-pro-v2",
1044      "name": "Kwaipilot: KAT-Coder-Pro V2",
1045      "cost_per_1m_in": 0.3,
1046      "cost_per_1m_out": 1.2,
1047      "cost_per_1m_in_cached": 0,
1048      "cost_per_1m_out_cached": 0.06,
1049      "context_window": 256000,
1050      "default_max_tokens": 40000,
1051      "can_reason": false,
1052      "supports_attachments": false
1053    },
1054    {
1055      "id": "meta-llama/llama-3.1-70b-instruct",
1056      "name": "Meta: Llama 3.1 70B Instruct",
1057      "cost_per_1m_in": 0.4,
1058      "cost_per_1m_out": 0.4,
1059      "cost_per_1m_in_cached": 0,
1060      "cost_per_1m_out_cached": 0,
1061      "context_window": 131072,
1062      "default_max_tokens": 8192,
1063      "can_reason": false,
1064      "supports_attachments": false
1065    },
1066    {
1067      "id": "meta-llama/llama-3.3-70b-instruct",
1068      "name": "Meta: Llama 3.3 70B Instruct",
1069      "cost_per_1m_in": 0.135,
1070      "cost_per_1m_out": 0.4,
1071      "cost_per_1m_in_cached": 0,
1072      "cost_per_1m_out_cached": 0,
1073      "context_window": 131072,
1074      "default_max_tokens": 60000,
1075      "can_reason": false,
1076      "supports_attachments": false
1077    },
1078    {
1079      "id": "meta-llama/llama-3.3-70b-instruct:free",
1080      "name": "Meta: Llama 3.3 70B Instruct (free)",
1081      "cost_per_1m_in": 0,
1082      "cost_per_1m_out": 0,
1083      "cost_per_1m_in_cached": 0,
1084      "cost_per_1m_out_cached": 0,
1085      "context_window": 65536,
1086      "default_max_tokens": 6553,
1087      "can_reason": false,
1088      "supports_attachments": false
1089    },
1090    {
1091      "id": "meta-llama/llama-4-scout",
1092      "name": "Meta: Llama 4 Scout",
1093      "cost_per_1m_in": 0.25,
1094      "cost_per_1m_out": 0.7,
1095      "cost_per_1m_in_cached": 0,
1096      "cost_per_1m_out_cached": 0,
1097      "context_window": 1310720,
1098      "default_max_tokens": 4096,
1099      "can_reason": false,
1100      "supports_attachments": true
1101    },
1102    {
1103      "id": "minimax/minimax-m2",
1104      "name": "MiniMax: MiniMax M2",
1105      "cost_per_1m_in": 0.255,
1106      "cost_per_1m_out": 1,
1107      "cost_per_1m_in_cached": 0,
1108      "cost_per_1m_out_cached": 0.03,
1109      "context_window": 196608,
1110      "default_max_tokens": 98304,
1111      "can_reason": true,
1112      "reasoning_levels": [
1113        "low",
1114        "medium",
1115        "high"
1116      ],
1117      "default_reasoning_effort": "medium",
1118      "supports_attachments": false
1119    },
1120    {
1121      "id": "minimax/minimax-m2.1",
1122      "name": "MiniMax: MiniMax M2.1",
1123      "cost_per_1m_in": 0.3,
1124      "cost_per_1m_out": 1.2,
1125      "cost_per_1m_in_cached": 0,
1126      "cost_per_1m_out_cached": 0.03,
1127      "context_window": 204800,
1128      "default_max_tokens": 65536,
1129      "can_reason": true,
1130      "reasoning_levels": [
1131        "low",
1132        "medium",
1133        "high"
1134      ],
1135      "default_reasoning_effort": "medium",
1136      "supports_attachments": false
1137    },
1138    {
1139      "id": "minimax/minimax-m2.5",
1140      "name": "MiniMax: MiniMax M2.5",
1141      "cost_per_1m_in": 0.3,
1142      "cost_per_1m_out": 1.2,
1143      "cost_per_1m_in_cached": 0,
1144      "cost_per_1m_out_cached": 0.03,
1145      "context_window": 204800,
1146      "default_max_tokens": 65550,
1147      "can_reason": true,
1148      "reasoning_levels": [
1149        "low",
1150        "medium",
1151        "high"
1152      ],
1153      "default_reasoning_effort": "medium",
1154      "supports_attachments": false
1155    },
1156    {
1157      "id": "minimax/minimax-m2.5:free",
1158      "name": "MiniMax: MiniMax M2.5 (free)",
1159      "cost_per_1m_in": 0,
1160      "cost_per_1m_out": 0,
1161      "cost_per_1m_in_cached": 0,
1162      "cost_per_1m_out_cached": 0,
1163      "context_window": 196608,
1164      "default_max_tokens": 4096,
1165      "can_reason": true,
1166      "reasoning_levels": [
1167        "low",
1168        "medium",
1169        "high"
1170      ],
1171      "default_reasoning_effort": "medium",
1172      "supports_attachments": false
1173    },
1174    {
1175      "id": "minimax/minimax-m2.7",
1176      "name": "MiniMax: MiniMax M2.7",
1177      "cost_per_1m_in": 0.3,
1178      "cost_per_1m_out": 1.2,
1179      "cost_per_1m_in_cached": 0,
1180      "cost_per_1m_out_cached": 0.06,
1181      "context_window": 204800,
1182      "default_max_tokens": 65536,
1183      "can_reason": true,
1184      "reasoning_levels": [
1185        "low",
1186        "medium",
1187        "high"
1188      ],
1189      "default_reasoning_effort": "medium",
1190      "supports_attachments": false
1191    },
1192    {
1193      "id": "mistralai/mistral-large",
1194      "name": "Mistral Large",
1195      "cost_per_1m_in": 2,
1196      "cost_per_1m_out": 6,
1197      "cost_per_1m_in_cached": 0,
1198      "cost_per_1m_out_cached": 0.2,
1199      "context_window": 128000,
1200      "default_max_tokens": 12800,
1201      "can_reason": false,
1202      "supports_attachments": false
1203    },
1204    {
1205      "id": "mistralai/mistral-large-2407",
1206      "name": "Mistral Large 2407",
1207      "cost_per_1m_in": 2,
1208      "cost_per_1m_out": 6,
1209      "cost_per_1m_in_cached": 0,
1210      "cost_per_1m_out_cached": 0.2,
1211      "context_window": 131072,
1212      "default_max_tokens": 13107,
1213      "can_reason": false,
1214      "supports_attachments": false
1215    },
1216    {
1217      "id": "mistralai/mistral-large-2411",
1218      "name": "Mistral Large 2411",
1219      "cost_per_1m_in": 2,
1220      "cost_per_1m_out": 6,
1221      "cost_per_1m_in_cached": 0,
1222      "cost_per_1m_out_cached": 0.2,
1223      "context_window": 131072,
1224      "default_max_tokens": 13107,
1225      "can_reason": false,
1226      "supports_attachments": false
1227    },
1228    {
1229      "id": "mistralai/codestral-2508",
1230      "name": "Mistral: Codestral 2508",
1231      "cost_per_1m_in": 0.3,
1232      "cost_per_1m_out": 0.9,
1233      "cost_per_1m_in_cached": 0,
1234      "cost_per_1m_out_cached": 0.03,
1235      "context_window": 256000,
1236      "default_max_tokens": 25600,
1237      "can_reason": false,
1238      "supports_attachments": false
1239    },
1240    {
1241      "id": "mistralai/devstral-2512",
1242      "name": "Mistral: Devstral 2 2512",
1243      "cost_per_1m_in": 0.4,
1244      "cost_per_1m_out": 2,
1245      "cost_per_1m_in_cached": 0,
1246      "cost_per_1m_out_cached": 0.04,
1247      "context_window": 262144,
1248      "default_max_tokens": 26214,
1249      "can_reason": false,
1250      "supports_attachments": false
1251    },
1252    {
1253      "id": "mistralai/devstral-medium",
1254      "name": "Mistral: Devstral Medium",
1255      "cost_per_1m_in": 0.4,
1256      "cost_per_1m_out": 2,
1257      "cost_per_1m_in_cached": 0,
1258      "cost_per_1m_out_cached": 0.04,
1259      "context_window": 131072,
1260      "default_max_tokens": 13107,
1261      "can_reason": false,
1262      "supports_attachments": false
1263    },
1264    {
1265      "id": "mistralai/devstral-small",
1266      "name": "Mistral: Devstral Small 1.1",
1267      "cost_per_1m_in": 0.1,
1268      "cost_per_1m_out": 0.3,
1269      "cost_per_1m_in_cached": 0,
1270      "cost_per_1m_out_cached": 0.01,
1271      "context_window": 131072,
1272      "default_max_tokens": 13107,
1273      "can_reason": false,
1274      "supports_attachments": false
1275    },
1276    {
1277      "id": "mistralai/ministral-14b-2512",
1278      "name": "Mistral: Ministral 3 14B 2512",
1279      "cost_per_1m_in": 0.35,
1280      "cost_per_1m_out": 0.35,
1281      "cost_per_1m_in_cached": 0,
1282      "cost_per_1m_out_cached": 0,
1283      "context_window": 262144,
1284      "default_max_tokens": 131072,
1285      "can_reason": false,
1286      "supports_attachments": true
1287    },
1288    {
1289      "id": "mistralai/ministral-3b-2512",
1290      "name": "Mistral: Ministral 3 3B 2512",
1291      "cost_per_1m_in": 0.15,
1292      "cost_per_1m_out": 0.15,
1293      "cost_per_1m_in_cached": 0,
1294      "cost_per_1m_out_cached": 0,
1295      "context_window": 131072,
1296      "default_max_tokens": 65536,
1297      "can_reason": false,
1298      "supports_attachments": true
1299    },
1300    {
1301      "id": "mistralai/ministral-8b-2512",
1302      "name": "Mistral: Ministral 3 8B 2512",
1303      "cost_per_1m_in": 0.15,
1304      "cost_per_1m_out": 0.15,
1305      "cost_per_1m_in_cached": 0,
1306      "cost_per_1m_out_cached": 0.015,
1307      "context_window": 262144,
1308      "default_max_tokens": 26214,
1309      "can_reason": false,
1310      "supports_attachments": true
1311    },
1312    {
1313      "id": "mistralai/mistral-large-2512",
1314      "name": "Mistral: Mistral Large 3 2512",
1315      "cost_per_1m_in": 0.5,
1316      "cost_per_1m_out": 1.5,
1317      "cost_per_1m_in_cached": 0,
1318      "cost_per_1m_out_cached": 0.05,
1319      "context_window": 262144,
1320      "default_max_tokens": 26214,
1321      "can_reason": false,
1322      "supports_attachments": true
1323    },
1324    {
1325      "id": "mistralai/mistral-medium-3",
1326      "name": "Mistral: Mistral Medium 3",
1327      "cost_per_1m_in": 0.4,
1328      "cost_per_1m_out": 2,
1329      "cost_per_1m_in_cached": 0,
1330      "cost_per_1m_out_cached": 0.04,
1331      "context_window": 131072,
1332      "default_max_tokens": 13107,
1333      "can_reason": false,
1334      "supports_attachments": true
1335    },
1336    {
1337      "id": "mistralai/mistral-medium-3.1",
1338      "name": "Mistral: Mistral Medium 3.1",
1339      "cost_per_1m_in": 0.4,
1340      "cost_per_1m_out": 2,
1341      "cost_per_1m_in_cached": 0,
1342      "cost_per_1m_out_cached": 0.04,
1343      "context_window": 131072,
1344      "default_max_tokens": 13107,
1345      "can_reason": false,
1346      "supports_attachments": true
1347    },
1348    {
1349      "id": "mistralai/mistral-medium-3-5",
1350      "name": "Mistral: Mistral Medium 3.5",
1351      "cost_per_1m_in": 1.5,
1352      "cost_per_1m_out": 7.5,
1353      "cost_per_1m_in_cached": 0,
1354      "cost_per_1m_out_cached": 0,
1355      "context_window": 262144,
1356      "default_max_tokens": 26214,
1357      "can_reason": true,
1358      "reasoning_levels": [
1359        "low",
1360        "medium",
1361        "high"
1362      ],
1363      "default_reasoning_effort": "medium",
1364      "supports_attachments": true
1365    },
1366    {
1367      "id": "mistralai/mistral-nemo",
1368      "name": "Mistral: Mistral Nemo",
1369      "cost_per_1m_in": 0.15,
1370      "cost_per_1m_out": 0.15,
1371      "cost_per_1m_in_cached": 0,
1372      "cost_per_1m_out_cached": 0.015,
1373      "context_window": 131072,
1374      "default_max_tokens": 13107,
1375      "can_reason": false,
1376      "supports_attachments": false
1377    },
1378    {
1379      "id": "mistralai/mistral-small-3.2-24b-instruct",
1380      "name": "Mistral: Mistral Small 3.2 24B",
1381      "cost_per_1m_in": 0.09375,
1382      "cost_per_1m_out": 0.25,
1383      "cost_per_1m_in_cached": 0,
1384      "cost_per_1m_out_cached": 0,
1385      "context_window": 256000,
1386      "default_max_tokens": 8192,
1387      "can_reason": false,
1388      "supports_attachments": true
1389    },
1390    {
1391      "id": "mistralai/mistral-small-2603",
1392      "name": "Mistral: Mistral Small 4",
1393      "cost_per_1m_in": 0.15,
1394      "cost_per_1m_out": 0.6,
1395      "cost_per_1m_in_cached": 0,
1396      "cost_per_1m_out_cached": 0.015,
1397      "context_window": 262144,
1398      "default_max_tokens": 26214,
1399      "can_reason": true,
1400      "reasoning_levels": [
1401        "low",
1402        "medium",
1403        "high"
1404      ],
1405      "default_reasoning_effort": "medium",
1406      "supports_attachments": true
1407    },
1408    {
1409      "id": "mistralai/mixtral-8x22b-instruct",
1410      "name": "Mistral: Mixtral 8x22B Instruct",
1411      "cost_per_1m_in": 2,
1412      "cost_per_1m_out": 6,
1413      "cost_per_1m_in_cached": 0,
1414      "cost_per_1m_out_cached": 0.2,
1415      "context_window": 65536,
1416      "default_max_tokens": 6553,
1417      "can_reason": false,
1418      "supports_attachments": false
1419    },
1420    {
1421      "id": "mistralai/pixtral-large-2411",
1422      "name": "Mistral: Pixtral Large 2411",
1423      "cost_per_1m_in": 2,
1424      "cost_per_1m_out": 6,
1425      "cost_per_1m_in_cached": 0,
1426      "cost_per_1m_out_cached": 0.2,
1427      "context_window": 131072,
1428      "default_max_tokens": 13107,
1429      "can_reason": false,
1430      "supports_attachments": true
1431    },
1432    {
1433      "id": "mistralai/mistral-saba",
1434      "name": "Mistral: Saba",
1435      "cost_per_1m_in": 0.2,
1436      "cost_per_1m_out": 0.6,
1437      "cost_per_1m_in_cached": 0,
1438      "cost_per_1m_out_cached": 0.02,
1439      "context_window": 32768,
1440      "default_max_tokens": 3276,
1441      "can_reason": false,
1442      "supports_attachments": false
1443    },
1444    {
1445      "id": "mistralai/voxtral-small-24b-2507",
1446      "name": "Mistral: Voxtral Small 24B 2507",
1447      "cost_per_1m_in": 0.1,
1448      "cost_per_1m_out": 0.3,
1449      "cost_per_1m_in_cached": 0,
1450      "cost_per_1m_out_cached": 0.01,
1451      "context_window": 32000,
1452      "default_max_tokens": 3200,
1453      "can_reason": false,
1454      "supports_attachments": false
1455    },
1456    {
1457      "id": "moonshotai/kimi-k2",
1458      "name": "MoonshotAI: Kimi K2 0711",
1459      "cost_per_1m_in": 0.57,
1460      "cost_per_1m_out": 2.3,
1461      "cost_per_1m_in_cached": 0,
1462      "cost_per_1m_out_cached": 0,
1463      "context_window": 131072,
1464      "default_max_tokens": 16384,
1465      "can_reason": false,
1466      "supports_attachments": false
1467    },
1468    {
1469      "id": "moonshotai/kimi-k2-0905",
1470      "name": "MoonshotAI: Kimi K2 0905",
1471      "cost_per_1m_in": 0.6,
1472      "cost_per_1m_out": 2.5,
1473      "cost_per_1m_in_cached": 0,
1474      "cost_per_1m_out_cached": 0,
1475      "context_window": 262144,
1476      "default_max_tokens": 131072,
1477      "can_reason": false,
1478      "supports_attachments": false
1479    },
1480    {
1481      "id": "moonshotai/kimi-k2-thinking",
1482      "name": "MoonshotAI: Kimi K2 Thinking",
1483      "cost_per_1m_in": 0.6,
1484      "cost_per_1m_out": 2.5,
1485      "cost_per_1m_in_cached": 0,
1486      "cost_per_1m_out_cached": 0.15,
1487      "context_window": 262144,
1488      "default_max_tokens": 131072,
1489      "can_reason": true,
1490      "reasoning_levels": [
1491        "low",
1492        "medium",
1493        "high"
1494      ],
1495      "default_reasoning_effort": "medium",
1496      "supports_attachments": false
1497    },
1498    {
1499      "id": "moonshotai/kimi-k2.5",
1500      "name": "MoonshotAI: Kimi K2.5",
1501      "cost_per_1m_in": 0.45,
1502      "cost_per_1m_out": 2,
1503      "cost_per_1m_in_cached": 0,
1504      "cost_per_1m_out_cached": 0.1,
1505      "context_window": 262144,
1506      "default_max_tokens": 131072,
1507      "can_reason": true,
1508      "reasoning_levels": [
1509        "low",
1510        "medium",
1511        "high"
1512      ],
1513      "default_reasoning_effort": "medium",
1514      "supports_attachments": true
1515    },
1516    {
1517      "id": "moonshotai/kimi-k2.6",
1518      "name": "MoonshotAI: Kimi K2.6",
1519      "cost_per_1m_in": 0.75,
1520      "cost_per_1m_out": 3.5,
1521      "cost_per_1m_in_cached": 0,
1522      "cost_per_1m_out_cached": 0.15,
1523      "context_window": 262144,
1524      "default_max_tokens": 8192,
1525      "can_reason": true,
1526      "reasoning_levels": [
1527        "low",
1528        "medium",
1529        "high"
1530      ],
1531      "default_reasoning_effort": "medium",
1532      "supports_attachments": true
1533    },
1534    {
1535      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1536      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1537      "cost_per_1m_in": 0.1,
1538      "cost_per_1m_out": 0.4,
1539      "cost_per_1m_in_cached": 0,
1540      "cost_per_1m_out_cached": 0,
1541      "context_window": 131072,
1542      "default_max_tokens": 8192,
1543      "can_reason": true,
1544      "reasoning_levels": [
1545        "low",
1546        "medium",
1547        "high"
1548      ],
1549      "default_reasoning_effort": "medium",
1550      "supports_attachments": false
1551    },
1552    {
1553      "id": "nvidia/nemotron-3-nano-30b-a3b",
1554      "name": "NVIDIA: Nemotron 3 Nano 30B A3B",
1555      "cost_per_1m_in": 0.05,
1556      "cost_per_1m_out": 0.2,
1557      "cost_per_1m_in_cached": 0,
1558      "cost_per_1m_out_cached": 0,
1559      "context_window": 262144,
1560      "default_max_tokens": 114000,
1561      "can_reason": true,
1562      "reasoning_levels": [
1563        "low",
1564        "medium",
1565        "high"
1566      ],
1567      "default_reasoning_effort": "medium",
1568      "supports_attachments": false
1569    },
1570    {
1571      "id": "nvidia/nemotron-3-nano-30b-a3b:free",
1572      "name": "NVIDIA: Nemotron 3 Nano 30B A3B (free)",
1573      "cost_per_1m_in": 0,
1574      "cost_per_1m_out": 0,
1575      "cost_per_1m_in_cached": 0,
1576      "cost_per_1m_out_cached": 0,
1577      "context_window": 256000,
1578      "default_max_tokens": 25600,
1579      "can_reason": true,
1580      "reasoning_levels": [
1581        "low",
1582        "medium",
1583        "high"
1584      ],
1585      "default_reasoning_effort": "medium",
1586      "supports_attachments": false
1587    },
1588    {
1589      "id": "nvidia/nemotron-3-nano-omni-30b-a3b-reasoning:free",
1590      "name": "NVIDIA: Nemotron 3 Nano Omni (free)",
1591      "cost_per_1m_in": 0,
1592      "cost_per_1m_out": 0,
1593      "cost_per_1m_in_cached": 0,
1594      "cost_per_1m_out_cached": 0,
1595      "context_window": 256000,
1596      "default_max_tokens": 32768,
1597      "can_reason": true,
1598      "reasoning_levels": [
1599        "low",
1600        "medium",
1601        "high"
1602      ],
1603      "default_reasoning_effort": "medium",
1604      "supports_attachments": true
1605    },
1606    {
1607      "id": "nvidia/nemotron-3-super-120b-a12b",
1608      "name": "NVIDIA: Nemotron 3 Super",
1609      "cost_per_1m_in": 0.1,
1610      "cost_per_1m_out": 0.5,
1611      "cost_per_1m_in_cached": 0,
1612      "cost_per_1m_out_cached": 0,
1613      "context_window": 262144,
1614      "default_max_tokens": 8192,
1615      "can_reason": true,
1616      "reasoning_levels": [
1617        "low",
1618        "medium",
1619        "high"
1620      ],
1621      "default_reasoning_effort": "medium",
1622      "supports_attachments": false
1623    },
1624    {
1625      "id": "nvidia/nemotron-3-super-120b-a12b:free",
1626      "name": "NVIDIA: Nemotron 3 Super (free)",
1627      "cost_per_1m_in": 0,
1628      "cost_per_1m_out": 0,
1629      "cost_per_1m_in_cached": 0,
1630      "cost_per_1m_out_cached": 0,
1631      "context_window": 262144,
1632      "default_max_tokens": 131072,
1633      "can_reason": true,
1634      "reasoning_levels": [
1635        "low",
1636        "medium",
1637        "high"
1638      ],
1639      "default_reasoning_effort": "medium",
1640      "supports_attachments": false
1641    },
1642    {
1643      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1644      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1645      "cost_per_1m_in": 0,
1646      "cost_per_1m_out": 0,
1647      "cost_per_1m_in_cached": 0,
1648      "cost_per_1m_out_cached": 0,
1649      "context_window": 128000,
1650      "default_max_tokens": 64000,
1651      "can_reason": true,
1652      "reasoning_levels": [
1653        "low",
1654        "medium",
1655        "high"
1656      ],
1657      "default_reasoning_effort": "medium",
1658      "supports_attachments": true
1659    },
1660    {
1661      "id": "nvidia/nemotron-nano-9b-v2",
1662      "name": "NVIDIA: Nemotron Nano 9B V2",
1663      "cost_per_1m_in": 0.04,
1664      "cost_per_1m_out": 0.16,
1665      "cost_per_1m_in_cached": 0,
1666      "cost_per_1m_out_cached": 0,
1667      "context_window": 131072,
1668      "default_max_tokens": 8192,
1669      "can_reason": true,
1670      "reasoning_levels": [
1671        "low",
1672        "medium",
1673        "high"
1674      ],
1675      "default_reasoning_effort": "medium",
1676      "supports_attachments": false
1677    },
1678    {
1679      "id": "nvidia/nemotron-nano-9b-v2:free",
1680      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1681      "cost_per_1m_in": 0,
1682      "cost_per_1m_out": 0,
1683      "cost_per_1m_in_cached": 0,
1684      "cost_per_1m_out_cached": 0,
1685      "context_window": 128000,
1686      "default_max_tokens": 12800,
1687      "can_reason": true,
1688      "reasoning_levels": [
1689        "low",
1690        "medium",
1691        "high"
1692      ],
1693      "default_reasoning_effort": "medium",
1694      "supports_attachments": false
1695    },
1696    {
1697      "id": "nex-agi/deepseek-v3.1-nex-n1",
1698      "name": "Nex AGI: DeepSeek V3.1 Nex N1",
1699      "cost_per_1m_in": 0.135,
1700      "cost_per_1m_out": 0.5,
1701      "cost_per_1m_in_cached": 0,
1702      "cost_per_1m_out_cached": 0,
1703      "context_window": 131072,
1704      "default_max_tokens": 81920,
1705      "can_reason": false,
1706      "supports_attachments": false
1707    },
1708    {
1709      "id": "openai/gpt-audio",
1710      "name": "OpenAI: GPT Audio",
1711      "cost_per_1m_in": 2.5,
1712      "cost_per_1m_out": 10,
1713      "cost_per_1m_in_cached": 0,
1714      "cost_per_1m_out_cached": 0,
1715      "context_window": 128000,
1716      "default_max_tokens": 8192,
1717      "can_reason": false,
1718      "supports_attachments": false
1719    },
1720    {
1721      "id": "openai/gpt-audio-mini",
1722      "name": "OpenAI: GPT Audio Mini",
1723      "cost_per_1m_in": 0.6,
1724      "cost_per_1m_out": 2.4,
1725      "cost_per_1m_in_cached": 0,
1726      "cost_per_1m_out_cached": 0,
1727      "context_window": 128000,
1728      "default_max_tokens": 8192,
1729      "can_reason": false,
1730      "supports_attachments": false
1731    },
1732    {
1733      "id": "openai/gpt-chat-latest",
1734      "name": "OpenAI: GPT Chat Latest",
1735      "cost_per_1m_in": 5,
1736      "cost_per_1m_out": 30,
1737      "cost_per_1m_in_cached": 0,
1738      "cost_per_1m_out_cached": 0.5,
1739      "context_window": 400000,
1740      "default_max_tokens": 64000,
1741      "can_reason": false,
1742      "supports_attachments": true
1743    },
1744    {
1745      "id": "openai/gpt-4-turbo",
1746      "name": "OpenAI: GPT-4 Turbo",
1747      "cost_per_1m_in": 10,
1748      "cost_per_1m_out": 30,
1749      "cost_per_1m_in_cached": 0,
1750      "cost_per_1m_out_cached": 0,
1751      "context_window": 128000,
1752      "default_max_tokens": 2048,
1753      "can_reason": false,
1754      "supports_attachments": true
1755    },
1756    {
1757      "id": "openai/gpt-4-1106-preview",
1758      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1759      "cost_per_1m_in": 10,
1760      "cost_per_1m_out": 30,
1761      "cost_per_1m_in_cached": 0,
1762      "cost_per_1m_out_cached": 0,
1763      "context_window": 128000,
1764      "default_max_tokens": 2048,
1765      "can_reason": false,
1766      "supports_attachments": false
1767    },
1768    {
1769      "id": "openai/gpt-4-turbo-preview",
1770      "name": "OpenAI: GPT-4 Turbo Preview",
1771      "cost_per_1m_in": 10,
1772      "cost_per_1m_out": 30,
1773      "cost_per_1m_in_cached": 0,
1774      "cost_per_1m_out_cached": 0,
1775      "context_window": 128000,
1776      "default_max_tokens": 2048,
1777      "can_reason": false,
1778      "supports_attachments": false
1779    },
1780    {
1781      "id": "openai/gpt-4.1",
1782      "name": "OpenAI: GPT-4.1",
1783      "cost_per_1m_in": 2,
1784      "cost_per_1m_out": 8,
1785      "cost_per_1m_in_cached": 0,
1786      "cost_per_1m_out_cached": 0.5,
1787      "context_window": 1047576,
1788      "default_max_tokens": 16384,
1789      "can_reason": false,
1790      "supports_attachments": true
1791    },
1792    {
1793      "id": "openai/gpt-4.1-mini",
1794      "name": "OpenAI: GPT-4.1 Mini",
1795      "cost_per_1m_in": 0.4,
1796      "cost_per_1m_out": 1.6,
1797      "cost_per_1m_in_cached": 0,
1798      "cost_per_1m_out_cached": 0.1,
1799      "context_window": 1047576,
1800      "default_max_tokens": 104757,
1801      "can_reason": false,
1802      "supports_attachments": true
1803    },
1804    {
1805      "id": "openai/gpt-4.1-nano",
1806      "name": "OpenAI: GPT-4.1 Nano",
1807      "cost_per_1m_in": 0.1,
1808      "cost_per_1m_out": 0.4,
1809      "cost_per_1m_in_cached": 0,
1810      "cost_per_1m_out_cached": 0.03,
1811      "context_window": 1047576,
1812      "default_max_tokens": 104757,
1813      "can_reason": false,
1814      "supports_attachments": true
1815    },
1816    {
1817      "id": "openai/gpt-4o",
1818      "name": "OpenAI: GPT-4o",
1819      "cost_per_1m_in": 2.5,
1820      "cost_per_1m_out": 10,
1821      "cost_per_1m_in_cached": 0,
1822      "cost_per_1m_out_cached": 0,
1823      "context_window": 128000,
1824      "default_max_tokens": 8192,
1825      "can_reason": false,
1826      "supports_attachments": true
1827    },
1828    {
1829      "id": "openai/gpt-4o-2024-05-13",
1830      "name": "OpenAI: GPT-4o (2024-05-13)",
1831      "cost_per_1m_in": 5,
1832      "cost_per_1m_out": 15,
1833      "cost_per_1m_in_cached": 0,
1834      "cost_per_1m_out_cached": 0,
1835      "context_window": 128000,
1836      "default_max_tokens": 2048,
1837      "can_reason": false,
1838      "supports_attachments": true
1839    },
1840    {
1841      "id": "openai/gpt-4o-2024-08-06",
1842      "name": "OpenAI: GPT-4o (2024-08-06)",
1843      "cost_per_1m_in": 2.5,
1844      "cost_per_1m_out": 10,
1845      "cost_per_1m_in_cached": 0,
1846      "cost_per_1m_out_cached": 1.25,
1847      "context_window": 128000,
1848      "default_max_tokens": 8192,
1849      "can_reason": false,
1850      "supports_attachments": true
1851    },
1852    {
1853      "id": "openai/gpt-4o-2024-11-20",
1854      "name": "OpenAI: GPT-4o (2024-11-20)",
1855      "cost_per_1m_in": 2.5,
1856      "cost_per_1m_out": 10,
1857      "cost_per_1m_in_cached": 0,
1858      "cost_per_1m_out_cached": 1.25,
1859      "context_window": 128000,
1860      "default_max_tokens": 8192,
1861      "can_reason": false,
1862      "supports_attachments": true
1863    },
1864    {
1865      "id": "openai/gpt-4o-audio-preview",
1866      "name": "OpenAI: GPT-4o Audio",
1867      "cost_per_1m_in": 2.5,
1868      "cost_per_1m_out": 10,
1869      "cost_per_1m_in_cached": 0,
1870      "cost_per_1m_out_cached": 0,
1871      "context_window": 128000,
1872      "default_max_tokens": 8192,
1873      "can_reason": false,
1874      "supports_attachments": false
1875    },
1876    {
1877      "id": "openai/gpt-4o-mini",
1878      "name": "OpenAI: GPT-4o-mini",
1879      "cost_per_1m_in": 0.15,
1880      "cost_per_1m_out": 0.6,
1881      "cost_per_1m_in_cached": 0,
1882      "cost_per_1m_out_cached": 0.075,
1883      "context_window": 128000,
1884      "default_max_tokens": 8192,
1885      "can_reason": false,
1886      "supports_attachments": true
1887    },
1888    {
1889      "id": "openai/gpt-4o-mini-2024-07-18",
1890      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1891      "cost_per_1m_in": 0.15,
1892      "cost_per_1m_out": 0.6,
1893      "cost_per_1m_in_cached": 0,
1894      "cost_per_1m_out_cached": 0.075,
1895      "context_window": 128000,
1896      "default_max_tokens": 8192,
1897      "can_reason": false,
1898      "supports_attachments": true
1899    },
1900    {
1901      "id": "openai/gpt-5",
1902      "name": "OpenAI: GPT-5",
1903      "cost_per_1m_in": 1.25,
1904      "cost_per_1m_out": 10,
1905      "cost_per_1m_in_cached": 0,
1906      "cost_per_1m_out_cached": 0.125,
1907      "context_window": 400000,
1908      "default_max_tokens": 64000,
1909      "can_reason": true,
1910      "reasoning_levels": [
1911        "low",
1912        "medium",
1913        "high"
1914      ],
1915      "default_reasoning_effort": "medium",
1916      "supports_attachments": true
1917    },
1918    {
1919      "id": "openai/gpt-5-codex",
1920      "name": "OpenAI: GPT-5 Codex",
1921      "cost_per_1m_in": 1.25,
1922      "cost_per_1m_out": 10,
1923      "cost_per_1m_in_cached": 0,
1924      "cost_per_1m_out_cached": 0.125,
1925      "context_window": 400000,
1926      "default_max_tokens": 64000,
1927      "can_reason": true,
1928      "reasoning_levels": [
1929        "low",
1930        "medium",
1931        "high"
1932      ],
1933      "default_reasoning_effort": "medium",
1934      "supports_attachments": true
1935    },
1936    {
1937      "id": "openai/gpt-5-mini",
1938      "name": "OpenAI: GPT-5 Mini",
1939      "cost_per_1m_in": 0.25,
1940      "cost_per_1m_out": 2,
1941      "cost_per_1m_in_cached": 0,
1942      "cost_per_1m_out_cached": 0.03,
1943      "context_window": 400000,
1944      "default_max_tokens": 40000,
1945      "can_reason": true,
1946      "reasoning_levels": [
1947        "low",
1948        "medium",
1949        "high"
1950      ],
1951      "default_reasoning_effort": "medium",
1952      "supports_attachments": true
1953    },
1954    {
1955      "id": "openai/gpt-5-nano",
1956      "name": "OpenAI: GPT-5 Nano",
1957      "cost_per_1m_in": 0.05,
1958      "cost_per_1m_out": 0.4,
1959      "cost_per_1m_in_cached": 0,
1960      "cost_per_1m_out_cached": 0.01,
1961      "context_window": 400000,
1962      "default_max_tokens": 40000,
1963      "can_reason": true,
1964      "reasoning_levels": [
1965        "low",
1966        "medium",
1967        "high"
1968      ],
1969      "default_reasoning_effort": "medium",
1970      "supports_attachments": true
1971    },
1972    {
1973      "id": "openai/gpt-5-pro",
1974      "name": "OpenAI: GPT-5 Pro",
1975      "cost_per_1m_in": 15,
1976      "cost_per_1m_out": 120,
1977      "cost_per_1m_in_cached": 0,
1978      "cost_per_1m_out_cached": 0,
1979      "context_window": 400000,
1980      "default_max_tokens": 64000,
1981      "can_reason": true,
1982      "reasoning_levels": [
1983        "low",
1984        "medium",
1985        "high"
1986      ],
1987      "default_reasoning_effort": "medium",
1988      "supports_attachments": true
1989    },
1990    {
1991      "id": "openai/gpt-5.1",
1992      "name": "OpenAI: GPT-5.1",
1993      "cost_per_1m_in": 1.25,
1994      "cost_per_1m_out": 10,
1995      "cost_per_1m_in_cached": 0,
1996      "cost_per_1m_out_cached": 0.13,
1997      "context_window": 400000,
1998      "default_max_tokens": 64000,
1999      "can_reason": true,
2000      "reasoning_levels": [
2001        "low",
2002        "medium",
2003        "high"
2004      ],
2005      "default_reasoning_effort": "medium",
2006      "supports_attachments": true
2007    },
2008    {
2009      "id": "openai/gpt-5.1-chat",
2010      "name": "OpenAI: GPT-5.1 Chat",
2011      "cost_per_1m_in": 1.25,
2012      "cost_per_1m_out": 10,
2013      "cost_per_1m_in_cached": 0,
2014      "cost_per_1m_out_cached": 0.125,
2015      "context_window": 128000,
2016      "default_max_tokens": 8192,
2017      "can_reason": false,
2018      "supports_attachments": true
2019    },
2020    {
2021      "id": "openai/gpt-5.1-codex",
2022      "name": "OpenAI: GPT-5.1-Codex",
2023      "cost_per_1m_in": 1.25,
2024      "cost_per_1m_out": 10,
2025      "cost_per_1m_in_cached": 0,
2026      "cost_per_1m_out_cached": 0.125,
2027      "context_window": 400000,
2028      "default_max_tokens": 64000,
2029      "can_reason": true,
2030      "reasoning_levels": [
2031        "low",
2032        "medium",
2033        "high"
2034      ],
2035      "default_reasoning_effort": "medium",
2036      "supports_attachments": true
2037    },
2038    {
2039      "id": "openai/gpt-5.1-codex-max",
2040      "name": "OpenAI: GPT-5.1-Codex-Max",
2041      "cost_per_1m_in": 1.25,
2042      "cost_per_1m_out": 10,
2043      "cost_per_1m_in_cached": 0,
2044      "cost_per_1m_out_cached": 0.125,
2045      "context_window": 400000,
2046      "default_max_tokens": 64000,
2047      "can_reason": true,
2048      "reasoning_levels": [
2049        "low",
2050        "medium",
2051        "high"
2052      ],
2053      "default_reasoning_effort": "medium",
2054      "supports_attachments": true
2055    },
2056    {
2057      "id": "openai/gpt-5.1-codex-mini",
2058      "name": "OpenAI: GPT-5.1-Codex-Mini",
2059      "cost_per_1m_in": 0.25,
2060      "cost_per_1m_out": 2,
2061      "cost_per_1m_in_cached": 0,
2062      "cost_per_1m_out_cached": 0.025,
2063      "context_window": 400000,
2064      "default_max_tokens": 50000,
2065      "can_reason": true,
2066      "reasoning_levels": [
2067        "low",
2068        "medium",
2069        "high"
2070      ],
2071      "default_reasoning_effort": "medium",
2072      "supports_attachments": true
2073    },
2074    {
2075      "id": "openai/gpt-5.2",
2076      "name": "OpenAI: GPT-5.2",
2077      "cost_per_1m_in": 1.75,
2078      "cost_per_1m_out": 14,
2079      "cost_per_1m_in_cached": 0,
2080      "cost_per_1m_out_cached": 0.175,
2081      "context_window": 400000,
2082      "default_max_tokens": 64000,
2083      "can_reason": true,
2084      "reasoning_levels": [
2085        "low",
2086        "medium",
2087        "high"
2088      ],
2089      "default_reasoning_effort": "medium",
2090      "supports_attachments": true
2091    },
2092    {
2093      "id": "openai/gpt-5.2-chat",
2094      "name": "OpenAI: GPT-5.2 Chat",
2095      "cost_per_1m_in": 1.75,
2096      "cost_per_1m_out": 14,
2097      "cost_per_1m_in_cached": 0,
2098      "cost_per_1m_out_cached": 0.175,
2099      "context_window": 128000,
2100      "default_max_tokens": 8192,
2101      "can_reason": false,
2102      "supports_attachments": true
2103    },
2104    {
2105      "id": "openai/gpt-5.2-pro",
2106      "name": "OpenAI: GPT-5.2 Pro",
2107      "cost_per_1m_in": 21,
2108      "cost_per_1m_out": 168,
2109      "cost_per_1m_in_cached": 0,
2110      "cost_per_1m_out_cached": 0,
2111      "context_window": 400000,
2112      "default_max_tokens": 64000,
2113      "can_reason": true,
2114      "reasoning_levels": [
2115        "low",
2116        "medium",
2117        "high"
2118      ],
2119      "default_reasoning_effort": "medium",
2120      "supports_attachments": true
2121    },
2122    {
2123      "id": "openai/gpt-5.2-codex",
2124      "name": "OpenAI: GPT-5.2-Codex",
2125      "cost_per_1m_in": 1.75,
2126      "cost_per_1m_out": 14,
2127      "cost_per_1m_in_cached": 0,
2128      "cost_per_1m_out_cached": 0.175,
2129      "context_window": 400000,
2130      "default_max_tokens": 64000,
2131      "can_reason": true,
2132      "reasoning_levels": [
2133        "low",
2134        "medium",
2135        "high"
2136      ],
2137      "default_reasoning_effort": "medium",
2138      "supports_attachments": true
2139    },
2140    {
2141      "id": "openai/gpt-5.3-chat",
2142      "name": "OpenAI: GPT-5.3 Chat",
2143      "cost_per_1m_in": 1.75,
2144      "cost_per_1m_out": 14,
2145      "cost_per_1m_in_cached": 0,
2146      "cost_per_1m_out_cached": 0.175,
2147      "context_window": 128000,
2148      "default_max_tokens": 8192,
2149      "can_reason": false,
2150      "supports_attachments": true
2151    },
2152    {
2153      "id": "openai/gpt-5.3-codex",
2154      "name": "OpenAI: GPT-5.3-Codex",
2155      "cost_per_1m_in": 1.75,
2156      "cost_per_1m_out": 14,
2157      "cost_per_1m_in_cached": 0,
2158      "cost_per_1m_out_cached": 0.175,
2159      "context_window": 400000,
2160      "default_max_tokens": 64000,
2161      "can_reason": true,
2162      "reasoning_levels": [
2163        "low",
2164        "medium",
2165        "high"
2166      ],
2167      "default_reasoning_effort": "medium",
2168      "supports_attachments": true
2169    },
2170    {
2171      "id": "openai/gpt-5.4",
2172      "name": "OpenAI: GPT-5.4",
2173      "cost_per_1m_in": 2.5,
2174      "cost_per_1m_out": 15,
2175      "cost_per_1m_in_cached": 0,
2176      "cost_per_1m_out_cached": 0.25,
2177      "context_window": 1050000,
2178      "default_max_tokens": 64000,
2179      "can_reason": true,
2180      "reasoning_levels": [
2181        "low",
2182        "medium",
2183        "high"
2184      ],
2185      "default_reasoning_effort": "medium",
2186      "supports_attachments": true
2187    },
2188    {
2189      "id": "openai/gpt-5.4-mini",
2190      "name": "OpenAI: GPT-5.4 Mini",
2191      "cost_per_1m_in": 0.75,
2192      "cost_per_1m_out": 4.5,
2193      "cost_per_1m_in_cached": 0,
2194      "cost_per_1m_out_cached": 0.075,
2195      "context_window": 400000,
2196      "default_max_tokens": 64000,
2197      "can_reason": true,
2198      "reasoning_levels": [
2199        "low",
2200        "medium",
2201        "high"
2202      ],
2203      "default_reasoning_effort": "medium",
2204      "supports_attachments": true
2205    },
2206    {
2207      "id": "openai/gpt-5.4-nano",
2208      "name": "OpenAI: GPT-5.4 Nano",
2209      "cost_per_1m_in": 0.2,
2210      "cost_per_1m_out": 1.25,
2211      "cost_per_1m_in_cached": 0,
2212      "cost_per_1m_out_cached": 0.02,
2213      "context_window": 400000,
2214      "default_max_tokens": 64000,
2215      "can_reason": true,
2216      "reasoning_levels": [
2217        "low",
2218        "medium",
2219        "high"
2220      ],
2221      "default_reasoning_effort": "medium",
2222      "supports_attachments": true
2223    },
2224    {
2225      "id": "openai/gpt-5.4-pro",
2226      "name": "OpenAI: GPT-5.4 Pro",
2227      "cost_per_1m_in": 30,
2228      "cost_per_1m_out": 180,
2229      "cost_per_1m_in_cached": 0,
2230      "cost_per_1m_out_cached": 0,
2231      "context_window": 1050000,
2232      "default_max_tokens": 64000,
2233      "can_reason": true,
2234      "reasoning_levels": [
2235        "low",
2236        "medium",
2237        "high"
2238      ],
2239      "default_reasoning_effort": "medium",
2240      "supports_attachments": true
2241    },
2242    {
2243      "id": "openai/gpt-5.5",
2244      "name": "OpenAI: GPT-5.5",
2245      "cost_per_1m_in": 5,
2246      "cost_per_1m_out": 30,
2247      "cost_per_1m_in_cached": 0,
2248      "cost_per_1m_out_cached": 0.5,
2249      "context_window": 1050000,
2250      "default_max_tokens": 64000,
2251      "can_reason": true,
2252      "reasoning_levels": [
2253        "low",
2254        "medium",
2255        "high"
2256      ],
2257      "default_reasoning_effort": "medium",
2258      "supports_attachments": true
2259    },
2260    {
2261      "id": "openai/gpt-5.5-pro",
2262      "name": "OpenAI: GPT-5.5 Pro",
2263      "cost_per_1m_in": 30,
2264      "cost_per_1m_out": 180,
2265      "cost_per_1m_in_cached": 0,
2266      "cost_per_1m_out_cached": 0,
2267      "context_window": 1050000,
2268      "default_max_tokens": 64000,
2269      "can_reason": true,
2270      "reasoning_levels": [
2271        "low",
2272        "medium",
2273        "high"
2274      ],
2275      "default_reasoning_effort": "medium",
2276      "supports_attachments": true
2277    },
2278    {
2279      "id": "openai/gpt-oss-120b",
2280      "name": "OpenAI: gpt-oss-120b",
2281      "cost_per_1m_in": 0.05,
2282      "cost_per_1m_out": 0.25,
2283      "cost_per_1m_in_cached": 0,
2284      "cost_per_1m_out_cached": 0,
2285      "context_window": 131072,
2286      "default_max_tokens": 16384,
2287      "can_reason": true,
2288      "reasoning_levels": [
2289        "low",
2290        "medium",
2291        "high"
2292      ],
2293      "default_reasoning_effort": "medium",
2294      "supports_attachments": false
2295    },
2296    {
2297      "id": "openai/gpt-oss-120b:free",
2298      "name": "OpenAI: gpt-oss-120b (free)",
2299      "cost_per_1m_in": 0,
2300      "cost_per_1m_out": 0,
2301      "cost_per_1m_in_cached": 0,
2302      "cost_per_1m_out_cached": 0,
2303      "context_window": 131072,
2304      "default_max_tokens": 65536,
2305      "can_reason": true,
2306      "reasoning_levels": [
2307        "low",
2308        "medium",
2309        "high"
2310      ],
2311      "default_reasoning_effort": "medium",
2312      "supports_attachments": false
2313    },
2314    {
2315      "id": "openai/gpt-oss-20b",
2316      "name": "OpenAI: gpt-oss-20b",
2317      "cost_per_1m_in": 0.03,
2318      "cost_per_1m_out": 0.14,
2319      "cost_per_1m_in_cached": 0,
2320      "cost_per_1m_out_cached": 0,
2321      "context_window": 131072,
2322      "default_max_tokens": 65536,
2323      "can_reason": true,
2324      "reasoning_levels": [
2325        "low",
2326        "medium",
2327        "high"
2328      ],
2329      "default_reasoning_effort": "medium",
2330      "supports_attachments": false
2331    },
2332    {
2333      "id": "openai/gpt-oss-20b:free",
2334      "name": "OpenAI: gpt-oss-20b (free)",
2335      "cost_per_1m_in": 0,
2336      "cost_per_1m_out": 0,
2337      "cost_per_1m_in_cached": 0,
2338      "cost_per_1m_out_cached": 0,
2339      "context_window": 131072,
2340      "default_max_tokens": 4096,
2341      "can_reason": true,
2342      "reasoning_levels": [
2343        "low",
2344        "medium",
2345        "high"
2346      ],
2347      "default_reasoning_effort": "medium",
2348      "supports_attachments": false
2349    },
2350    {
2351      "id": "openai/gpt-oss-safeguard-20b",
2352      "name": "OpenAI: gpt-oss-safeguard-20b",
2353      "cost_per_1m_in": 0.075,
2354      "cost_per_1m_out": 0.3,
2355      "cost_per_1m_in_cached": 0,
2356      "cost_per_1m_out_cached": 0.037,
2357      "context_window": 131072,
2358      "default_max_tokens": 32768,
2359      "can_reason": true,
2360      "reasoning_levels": [
2361        "low",
2362        "medium",
2363        "high"
2364      ],
2365      "default_reasoning_effort": "medium",
2366      "supports_attachments": false
2367    },
2368    {
2369      "id": "openai/o1",
2370      "name": "OpenAI: o1",
2371      "cost_per_1m_in": 15,
2372      "cost_per_1m_out": 60,
2373      "cost_per_1m_in_cached": 0,
2374      "cost_per_1m_out_cached": 7.5,
2375      "context_window": 200000,
2376      "default_max_tokens": 50000,
2377      "can_reason": true,
2378      "reasoning_levels": [
2379        "low",
2380        "medium",
2381        "high"
2382      ],
2383      "default_reasoning_effort": "medium",
2384      "supports_attachments": true
2385    },
2386    {
2387      "id": "openai/o3",
2388      "name": "OpenAI: o3",
2389      "cost_per_1m_in": 2,
2390      "cost_per_1m_out": 8,
2391      "cost_per_1m_in_cached": 0,
2392      "cost_per_1m_out_cached": 0.5,
2393      "context_window": 200000,
2394      "default_max_tokens": 50000,
2395      "can_reason": true,
2396      "reasoning_levels": [
2397        "low",
2398        "medium",
2399        "high"
2400      ],
2401      "default_reasoning_effort": "medium",
2402      "supports_attachments": true
2403    },
2404    {
2405      "id": "openai/o3-deep-research",
2406      "name": "OpenAI: o3 Deep Research",
2407      "cost_per_1m_in": 10,
2408      "cost_per_1m_out": 40,
2409      "cost_per_1m_in_cached": 0,
2410      "cost_per_1m_out_cached": 2.5,
2411      "context_window": 200000,
2412      "default_max_tokens": 50000,
2413      "can_reason": true,
2414      "reasoning_levels": [
2415        "low",
2416        "medium",
2417        "high"
2418      ],
2419      "default_reasoning_effort": "medium",
2420      "supports_attachments": true
2421    },
2422    {
2423      "id": "openai/o3-mini",
2424      "name": "OpenAI: o3 Mini",
2425      "cost_per_1m_in": 1.1,
2426      "cost_per_1m_out": 4.4,
2427      "cost_per_1m_in_cached": 0,
2428      "cost_per_1m_out_cached": 0.55,
2429      "context_window": 200000,
2430      "default_max_tokens": 50000,
2431      "can_reason": true,
2432      "reasoning_levels": [
2433        "low",
2434        "medium",
2435        "high"
2436      ],
2437      "default_reasoning_effort": "medium",
2438      "supports_attachments": false
2439    },
2440    {
2441      "id": "openai/o3-mini-high",
2442      "name": "OpenAI: o3 Mini High",
2443      "cost_per_1m_in": 1.1,
2444      "cost_per_1m_out": 4.4,
2445      "cost_per_1m_in_cached": 0,
2446      "cost_per_1m_out_cached": 0.55,
2447      "context_window": 200000,
2448      "default_max_tokens": 50000,
2449      "can_reason": true,
2450      "reasoning_levels": [
2451        "low",
2452        "medium",
2453        "high"
2454      ],
2455      "default_reasoning_effort": "medium",
2456      "supports_attachments": false
2457    },
2458    {
2459      "id": "openai/o3-pro",
2460      "name": "OpenAI: o3 Pro",
2461      "cost_per_1m_in": 20,
2462      "cost_per_1m_out": 80,
2463      "cost_per_1m_in_cached": 0,
2464      "cost_per_1m_out_cached": 0,
2465      "context_window": 200000,
2466      "default_max_tokens": 50000,
2467      "can_reason": true,
2468      "reasoning_levels": [
2469        "low",
2470        "medium",
2471        "high"
2472      ],
2473      "default_reasoning_effort": "medium",
2474      "supports_attachments": true
2475    },
2476    {
2477      "id": "openai/o4-mini",
2478      "name": "OpenAI: o4 Mini",
2479      "cost_per_1m_in": 1.1,
2480      "cost_per_1m_out": 4.4,
2481      "cost_per_1m_in_cached": 0,
2482      "cost_per_1m_out_cached": 0.275,
2483      "context_window": 200000,
2484      "default_max_tokens": 50000,
2485      "can_reason": true,
2486      "reasoning_levels": [
2487        "low",
2488        "medium",
2489        "high"
2490      ],
2491      "default_reasoning_effort": "medium",
2492      "supports_attachments": true
2493    },
2494    {
2495      "id": "openai/o4-mini-deep-research",
2496      "name": "OpenAI: o4 Mini Deep Research",
2497      "cost_per_1m_in": 2,
2498      "cost_per_1m_out": 8,
2499      "cost_per_1m_in_cached": 0,
2500      "cost_per_1m_out_cached": 0.5,
2501      "context_window": 200000,
2502      "default_max_tokens": 50000,
2503      "can_reason": true,
2504      "reasoning_levels": [
2505        "low",
2506        "medium",
2507        "high"
2508      ],
2509      "default_reasoning_effort": "medium",
2510      "supports_attachments": true
2511    },
2512    {
2513      "id": "openai/o4-mini-high",
2514      "name": "OpenAI: o4 Mini High",
2515      "cost_per_1m_in": 1.1,
2516      "cost_per_1m_out": 4.4,
2517      "cost_per_1m_in_cached": 0,
2518      "cost_per_1m_out_cached": 0.275,
2519      "context_window": 200000,
2520      "default_max_tokens": 50000,
2521      "can_reason": true,
2522      "reasoning_levels": [
2523        "low",
2524        "medium",
2525        "high"
2526      ],
2527      "default_reasoning_effort": "medium",
2528      "supports_attachments": true
2529    },
2530    {
2531      "id": "openrouter/owl-alpha",
2532      "name": "Owl Alpha",
2533      "cost_per_1m_in": 0,
2534      "cost_per_1m_out": 0,
2535      "cost_per_1m_in_cached": 0,
2536      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
2538      "default_max_tokens": 131072,
2539      "can_reason": false,
2540      "supports_attachments": false
2541    },
2542    {
2543      "id": "poolside/laguna-m.1:free",
2544      "name": "Poolside: Laguna M.1 (free)",
2545      "cost_per_1m_in": 0,
2546      "cost_per_1m_out": 0,
2547      "cost_per_1m_in_cached": 0,
2548      "cost_per_1m_out_cached": 0,
2549      "context_window": 131072,
2550      "default_max_tokens": 4096,
2551      "can_reason": true,
2552      "reasoning_levels": [
2553        "low",
2554        "medium",
2555        "high"
2556      ],
2557      "default_reasoning_effort": "medium",
2558      "supports_attachments": false
2559    },
2560    {
2561      "id": "poolside/laguna-xs.2:free",
2562      "name": "Poolside: Laguna XS.2 (free)",
2563      "cost_per_1m_in": 0,
2564      "cost_per_1m_out": 0,
2565      "cost_per_1m_in_cached": 0,
2566      "cost_per_1m_out_cached": 0,
2567      "context_window": 131072,
2568      "default_max_tokens": 4096,
2569      "can_reason": true,
2570      "reasoning_levels": [
2571        "low",
2572        "medium",
2573        "high"
2574      ],
2575      "default_reasoning_effort": "medium",
2576      "supports_attachments": false
2577    },
2578    {
2579      "id": "prime-intellect/intellect-3",
2580      "name": "Prime Intellect: INTELLECT-3",
2581      "cost_per_1m_in": 0.2,
2582      "cost_per_1m_out": 1.1,
2583      "cost_per_1m_in_cached": 0,
2584      "cost_per_1m_out_cached": 0,
2585      "context_window": 131072,
2586      "default_max_tokens": 65536,
2587      "can_reason": true,
2588      "reasoning_levels": [
2589        "low",
2590        "medium",
2591        "high"
2592      ],
2593      "default_reasoning_effort": "medium",
2594      "supports_attachments": false
2595    },
2596    {
2597      "id": "qwen/qwen-2.5-72b-instruct",
2598      "name": "Qwen2.5 72B Instruct",
2599      "cost_per_1m_in": 0.36,
2600      "cost_per_1m_out": 0.4,
2601      "cost_per_1m_in_cached": 0,
2602      "cost_per_1m_out_cached": 0,
2603      "context_window": 32768,
2604      "default_max_tokens": 8192,
2605      "can_reason": false,
2606      "supports_attachments": false
2607    },
2608    {
2609      "id": "qwen/qwen-plus-2025-07-28",
2610      "name": "Qwen: Qwen Plus 0728",
2611      "cost_per_1m_in": 0.26,
2612      "cost_per_1m_out": 0.78,
2613      "cost_per_1m_in_cached": 0.325,
2614      "cost_per_1m_out_cached": 0,
2615      "context_window": 1000000,
2616      "default_max_tokens": 16384,
2617      "can_reason": false,
2618      "supports_attachments": false
2619    },
2620    {
2621      "id": "qwen/qwen-plus-2025-07-28:thinking",
2622      "name": "Qwen: Qwen Plus 0728 (thinking)",
2623      "cost_per_1m_in": 0.26,
2624      "cost_per_1m_out": 0.78,
2625      "cost_per_1m_in_cached": 0.325,
2626      "cost_per_1m_out_cached": 0,
2627      "context_window": 1000000,
2628      "default_max_tokens": 16384,
2629      "can_reason": true,
2630      "reasoning_levels": [
2631        "low",
2632        "medium",
2633        "high"
2634      ],
2635      "default_reasoning_effort": "medium",
2636      "supports_attachments": false
2637    },
2638    {
2639      "id": "qwen/qwen-vl-max",
2640      "name": "Qwen: Qwen VL Max",
2641      "cost_per_1m_in": 0.52,
2642      "cost_per_1m_out": 2.08,
2643      "cost_per_1m_in_cached": 0,
2644      "cost_per_1m_out_cached": 0,
2645      "context_window": 131072,
2646      "default_max_tokens": 16384,
2647      "can_reason": false,
2648      "supports_attachments": true
2649    },
2650    {
2651      "id": "qwen/qwen-max",
      "name": "Qwen: Qwen-Max",
2653      "cost_per_1m_in": 1.04,
2654      "cost_per_1m_out": 4.16,
2655      "cost_per_1m_in_cached": 0,
2656      "cost_per_1m_out_cached": 0.208,
2657      "context_window": 32768,
2658      "default_max_tokens": 4096,
2659      "can_reason": false,
2660      "supports_attachments": false
2661    },
2662    {
2663      "id": "qwen/qwen-plus",
2664      "name": "Qwen: Qwen-Plus",
2665      "cost_per_1m_in": 0.26,
2666      "cost_per_1m_out": 0.78,
2667      "cost_per_1m_in_cached": 0.325,
2668      "cost_per_1m_out_cached": 0.052,
2669      "context_window": 1000000,
2670      "default_max_tokens": 16384,
2671      "can_reason": false,
2672      "supports_attachments": false
2673    },
2674    {
2675      "id": "qwen/qwen-turbo",
2676      "name": "Qwen: Qwen-Turbo",
2677      "cost_per_1m_in": 0.0325,
2678      "cost_per_1m_out": 0.13,
2679      "cost_per_1m_in_cached": 0,
2680      "cost_per_1m_out_cached": 0.0065,
2681      "context_window": 131072,
2682      "default_max_tokens": 4096,
2683      "can_reason": false,
2684      "supports_attachments": false
2685    },
2686    {
2687      "id": "qwen/qwen3-14b",
2688      "name": "Qwen: Qwen3 14B",
2689      "cost_per_1m_in": 0.2275,
2690      "cost_per_1m_out": 0.91,
2691      "cost_per_1m_in_cached": 0,
2692      "cost_per_1m_out_cached": 0,
2693      "context_window": 131072,
2694      "default_max_tokens": 4096,
2695      "can_reason": true,
2696      "reasoning_levels": [
2697        "low",
2698        "medium",
2699        "high"
2700      ],
2701      "default_reasoning_effort": "medium",
2702      "supports_attachments": false
2703    },
2704    {
2705      "id": "qwen/qwen3-235b-a22b",
2706      "name": "Qwen: Qwen3 235B A22B",
2707      "cost_per_1m_in": 0.455,
2708      "cost_per_1m_out": 1.82,
2709      "cost_per_1m_in_cached": 0,
2710      "cost_per_1m_out_cached": 0,
2711      "context_window": 131072,
2712      "default_max_tokens": 4096,
2713      "can_reason": true,
2714      "reasoning_levels": [
2715        "low",
2716        "medium",
2717        "high"
2718      ],
2719      "default_reasoning_effort": "medium",
2720      "supports_attachments": false
2721    },
2722    {
2723      "id": "qwen/qwen3-235b-a22b-2507",
2724      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2725      "cost_per_1m_in": 0.2,
2726      "cost_per_1m_out": 0.8,
2727      "cost_per_1m_in_cached": 0,
2728      "cost_per_1m_out_cached": 0,
2729      "context_window": 262144,
2730      "default_max_tokens": 131072,
2731      "can_reason": false,
2732      "supports_attachments": false
2733    },
2734    {
2735      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2736      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2737      "cost_per_1m_in": 0.1495,
2738      "cost_per_1m_out": 1.495,
2739      "cost_per_1m_in_cached": 0,
2740      "cost_per_1m_out_cached": 0,
2741      "context_window": 131072,
2742      "default_max_tokens": 13107,
2743      "can_reason": true,
2744      "reasoning_levels": [
2745        "low",
2746        "medium",
2747        "high"
2748      ],
2749      "default_reasoning_effort": "medium",
2750      "supports_attachments": false
2751    },
2752    {
2753      "id": "qwen/qwen3-30b-a3b",
2754      "name": "Qwen: Qwen3 30B A3B",
2755      "cost_per_1m_in": 0.13,
2756      "cost_per_1m_out": 0.52,
2757      "cost_per_1m_in_cached": 0,
2758      "cost_per_1m_out_cached": 0,
2759      "context_window": 131072,
2760      "default_max_tokens": 4096,
2761      "can_reason": true,
2762      "reasoning_levels": [
2763        "low",
2764        "medium",
2765        "high"
2766      ],
2767      "default_reasoning_effort": "medium",
2768      "supports_attachments": false
2769    },
2770    {
2771      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2772      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2773      "cost_per_1m_in": 0.1,
2774      "cost_per_1m_out": 0.3,
2775      "cost_per_1m_in_cached": 0,
2776      "cost_per_1m_out_cached": 0,
2777      "context_window": 262144,
2778      "default_max_tokens": 26214,
2779      "can_reason": false,
2780      "supports_attachments": false
2781    },
2782    {
2783      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2784      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2785      "cost_per_1m_in": 0.08,
2786      "cost_per_1m_out": 0.4,
2787      "cost_per_1m_in_cached": 0,
2788      "cost_per_1m_out_cached": 0.08,
2789      "context_window": 131072,
2790      "default_max_tokens": 65536,
2791      "can_reason": true,
2792      "reasoning_levels": [
2793        "low",
2794        "medium",
2795        "high"
2796      ],
2797      "default_reasoning_effort": "medium",
2798      "supports_attachments": false
2799    },
2800    {
2801      "id": "qwen/qwen3-32b",
2802      "name": "Qwen: Qwen3 32B",
2803      "cost_per_1m_in": 0.104,
2804      "cost_per_1m_out": 0.416,
2805      "cost_per_1m_in_cached": 0,
2806      "cost_per_1m_out_cached": 0,
2807      "context_window": 131072,
2808      "default_max_tokens": 4096,
2809      "can_reason": true,
2810      "reasoning_levels": [
2811        "low",
2812        "medium",
2813        "high"
2814      ],
2815      "default_reasoning_effort": "medium",
2816      "supports_attachments": false
2817    },
2818    {
2819      "id": "qwen/qwen3-8b",
2820      "name": "Qwen: Qwen3 8B",
2821      "cost_per_1m_in": 0.117,
2822      "cost_per_1m_out": 0.455,
2823      "cost_per_1m_in_cached": 0,
2824      "cost_per_1m_out_cached": 0,
2825      "context_window": 131072,
2826      "default_max_tokens": 4096,
2827      "can_reason": true,
2828      "reasoning_levels": [
2829        "low",
2830        "medium",
2831        "high"
2832      ],
2833      "default_reasoning_effort": "medium",
2834      "supports_attachments": false
2835    },
2836    {
2837      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2838      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2839      "cost_per_1m_in": 0.07,
2840      "cost_per_1m_out": 0.28,
2841      "cost_per_1m_in_cached": 0,
2842      "cost_per_1m_out_cached": 0,
2843      "context_window": 262144,
2844      "default_max_tokens": 131072,
2845      "can_reason": false,
2846      "supports_attachments": false
2847    },
2848    {
2849      "id": "qwen/qwen3-coder",
2850      "name": "Qwen: Qwen3 Coder 480B A35B",
2851      "cost_per_1m_in": 0.22,
2852      "cost_per_1m_out": 1.8,
2853      "cost_per_1m_in_cached": 0,
2854      "cost_per_1m_out_cached": 0,
2855      "context_window": 262144,
2856      "default_max_tokens": 32768,
2857      "can_reason": false,
2858      "supports_attachments": false
2859    },
2860    {
2861      "id": "qwen/qwen3-coder:free",
2862      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2863      "cost_per_1m_in": 0,
2864      "cost_per_1m_out": 0,
2865      "cost_per_1m_in_cached": 0,
2866      "cost_per_1m_out_cached": 0,
2867      "context_window": 262000,
2868      "default_max_tokens": 131000,
2869      "can_reason": false,
2870      "supports_attachments": false
2871    },
2872    {
2873      "id": "qwen/qwen3-coder-flash",
2874      "name": "Qwen: Qwen3 Coder Flash",
2875      "cost_per_1m_in": 0.195,
2876      "cost_per_1m_out": 0.975,
2877      "cost_per_1m_in_cached": 0.24375,
2878      "cost_per_1m_out_cached": 0.039,
2879      "context_window": 1000000,
2880      "default_max_tokens": 32768,
2881      "can_reason": false,
2882      "supports_attachments": false
2883    },
2884    {
2885      "id": "qwen/qwen3-coder-next",
2886      "name": "Qwen: Qwen3 Coder Next",
2887      "cost_per_1m_in": 0.11,
2888      "cost_per_1m_out": 0.8,
2889      "cost_per_1m_in_cached": 0,
2890      "cost_per_1m_out_cached": 0.07,
2891      "context_window": 262144,
2892      "default_max_tokens": 131072,
2893      "can_reason": false,
2894      "supports_attachments": false
2895    },
2896    {
2897      "id": "qwen/qwen3-coder-plus",
2898      "name": "Qwen: Qwen3 Coder Plus",
2899      "cost_per_1m_in": 0.65,
2900      "cost_per_1m_out": 3.25,
2901      "cost_per_1m_in_cached": 0.8125,
2902      "cost_per_1m_out_cached": 0.13,
2903      "context_window": 1000000,
2904      "default_max_tokens": 32768,
2905      "can_reason": false,
2906      "supports_attachments": false
2907    },
2908    {
2909      "id": "qwen/qwen3-max",
2910      "name": "Qwen: Qwen3 Max",
2911      "cost_per_1m_in": 0.78,
2912      "cost_per_1m_out": 3.9,
2913      "cost_per_1m_in_cached": 0.975,
2914      "cost_per_1m_out_cached": 0.156,
2915      "context_window": 262144,
2916      "default_max_tokens": 16384,
2917      "can_reason": false,
2918      "supports_attachments": false
2919    },
2920    {
2921      "id": "qwen/qwen3-max-thinking",
2922      "name": "Qwen: Qwen3 Max Thinking",
2923      "cost_per_1m_in": 0.78,
2924      "cost_per_1m_out": 3.9,
2925      "cost_per_1m_in_cached": 0,
2926      "cost_per_1m_out_cached": 0,
2927      "context_window": 262144,
2928      "default_max_tokens": 16384,
2929      "can_reason": true,
2930      "reasoning_levels": [
2931        "low",
2932        "medium",
2933        "high"
2934      ],
2935      "default_reasoning_effort": "medium",
2936      "supports_attachments": false
2937    },
2938    {
2939      "id": "qwen/qwen3-next-80b-a3b-instruct",
2940      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2941      "cost_per_1m_in": 0.09,
2942      "cost_per_1m_out": 1.1,
2943      "cost_per_1m_in_cached": 0,
2944      "cost_per_1m_out_cached": 0,
2945      "context_window": 262144,
2946      "default_max_tokens": 8192,
2947      "can_reason": false,
2948      "supports_attachments": false
2949    },
2950    {
2951      "id": "qwen/qwen3-next-80b-a3b-instruct:free",
2952      "name": "Qwen: Qwen3 Next 80B A3B Instruct (free)",
2953      "cost_per_1m_in": 0,
2954      "cost_per_1m_out": 0,
2955      "cost_per_1m_in_cached": 0,
2956      "cost_per_1m_out_cached": 0,
2957      "context_window": 262144,
2958      "default_max_tokens": 26214,
2959      "can_reason": false,
2960      "supports_attachments": false
2961    },
2962    {
2963      "id": "qwen/qwen3-next-80b-a3b-thinking",
2964      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2965      "cost_per_1m_in": 0.0975,
2966      "cost_per_1m_out": 0.78,
2967      "cost_per_1m_in_cached": 0,
2968      "cost_per_1m_out_cached": 0,
2969      "context_window": 131072,
2970      "default_max_tokens": 16384,
2971      "can_reason": true,
2972      "reasoning_levels": [
2973        "low",
2974        "medium",
2975        "high"
2976      ],
2977      "default_reasoning_effort": "medium",
2978      "supports_attachments": false
2979    },
2980    {
2981      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2982      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2983      "cost_per_1m_in": 0.26,
2984      "cost_per_1m_out": 1.04,
2985      "cost_per_1m_in_cached": 0,
2986      "cost_per_1m_out_cached": 0,
2987      "context_window": 131072,
2988      "default_max_tokens": 16384,
2989      "can_reason": false,
2990      "supports_attachments": true
2991    },
2992    {
2993      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2994      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2995      "cost_per_1m_in": 0.26,
2996      "cost_per_1m_out": 2.6,
2997      "cost_per_1m_in_cached": 0,
2998      "cost_per_1m_out_cached": 0,
2999      "context_window": 131072,
3000      "default_max_tokens": 16384,
3001      "can_reason": true,
3002      "reasoning_levels": [
3003        "low",
3004        "medium",
3005        "high"
3006      ],
3007      "default_reasoning_effort": "medium",
3008      "supports_attachments": true
3009    },
3010    {
3011      "id": "qwen/qwen3-vl-30b-a3b-instruct",
3012      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
3013      "cost_per_1m_in": 0.15,
3014      "cost_per_1m_out": 0.6,
3015      "cost_per_1m_in_cached": 0,
3016      "cost_per_1m_out_cached": 0,
3017      "context_window": 262144,
3018      "default_max_tokens": 8192,
3019      "can_reason": false,
3020      "supports_attachments": true
3021    },
3022    {
3023      "id": "qwen/qwen3-vl-30b-a3b-thinking",
3024      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
3025      "cost_per_1m_in": 0.13,
3026      "cost_per_1m_out": 1.56,
3027      "cost_per_1m_in_cached": 0,
3028      "cost_per_1m_out_cached": 0,
3029      "context_window": 131072,
3030      "default_max_tokens": 16384,
3031      "can_reason": true,
3032      "reasoning_levels": [
3033        "low",
3034        "medium",
3035        "high"
3036      ],
3037      "default_reasoning_effort": "medium",
3038      "supports_attachments": true
3039    },
3040    {
3041      "id": "qwen/qwen3-vl-32b-instruct",
3042      "name": "Qwen: Qwen3 VL 32B Instruct",
3043      "cost_per_1m_in": 0.104,
3044      "cost_per_1m_out": 0.416,
3045      "cost_per_1m_in_cached": 0,
3046      "cost_per_1m_out_cached": 0,
3047      "context_window": 131072,
3048      "default_max_tokens": 16384,
3049      "can_reason": false,
3050      "supports_attachments": true
3051    },
3052    {
3053      "id": "qwen/qwen3-vl-8b-instruct",
3054      "name": "Qwen: Qwen3 VL 8B Instruct",
3055      "cost_per_1m_in": 0.25,
3056      "cost_per_1m_out": 0.75,
3057      "cost_per_1m_in_cached": 0,
3058      "cost_per_1m_out_cached": 0.12,
3059      "context_window": 262144,
3060      "default_max_tokens": 131072,
3061      "can_reason": false,
3062      "supports_attachments": true
3063    },
3064    {
3065      "id": "qwen/qwen3-vl-8b-thinking",
3066      "name": "Qwen: Qwen3 VL 8B Thinking",
3067      "cost_per_1m_in": 0.117,
3068      "cost_per_1m_out": 1.365,
3069      "cost_per_1m_in_cached": 0,
3070      "cost_per_1m_out_cached": 0,
3071      "context_window": 131072,
3072      "default_max_tokens": 16384,
3073      "can_reason": true,
3074      "reasoning_levels": [
3075        "low",
3076        "medium",
3077        "high"
3078      ],
3079      "default_reasoning_effort": "medium",
3080      "supports_attachments": true
3081    },
3082    {
3083      "id": "qwen/qwen3.5-397b-a17b",
3084      "name": "Qwen: Qwen3.5 397B A17B",
3085      "cost_per_1m_in": 0.5,
3086      "cost_per_1m_out": 3.6,
3087      "cost_per_1m_in_cached": 0,
3088      "cost_per_1m_out_cached": 0.3,
3089      "context_window": 262144,
3090      "default_max_tokens": 131072,
3091      "can_reason": true,
3092      "reasoning_levels": [
3093        "low",
3094        "medium",
3095        "high"
3096      ],
3097      "default_reasoning_effort": "medium",
3098      "supports_attachments": true
3099    },
3100    {
3101      "id": "qwen/qwen3.5-plus-02-15",
3102      "name": "Qwen: Qwen3.5 Plus 2026-02-15",
3103      "cost_per_1m_in": 0.26,
3104      "cost_per_1m_out": 1.56,
3105      "cost_per_1m_in_cached": 0.325,
3106      "cost_per_1m_out_cached": 0,
3107      "context_window": 1000000,
3108      "default_max_tokens": 32768,
3109      "can_reason": true,
3110      "reasoning_levels": [
3111        "low",
3112        "medium",
3113        "high"
3114      ],
3115      "default_reasoning_effort": "medium",
3116      "supports_attachments": true
3117    },
3118    {
3119      "id": "qwen/qwen3.5-plus-20260420",
3120      "name": "Qwen: Qwen3.5 Plus 2026-04-20",
3121      "cost_per_1m_in": 0.4,
3122      "cost_per_1m_out": 2.4,
3123      "cost_per_1m_in_cached": 0,
3124      "cost_per_1m_out_cached": 0,
3125      "context_window": 1000000,
3126      "default_max_tokens": 32768,
3127      "can_reason": true,
3128      "reasoning_levels": [
3129        "low",
3130        "medium",
3131        "high"
3132      ],
3133      "default_reasoning_effort": "medium",
3134      "supports_attachments": true
3135    },
3136    {
3137      "id": "qwen/qwen3.5-122b-a10b",
3138      "name": "Qwen: Qwen3.5-122B-A10B",
3139      "cost_per_1m_in": 0.3,
3140      "cost_per_1m_out": 2.4,
3141      "cost_per_1m_in_cached": 0,
3142      "cost_per_1m_out_cached": 0.3,
3143      "context_window": 262144,
3144      "default_max_tokens": 32768,
3145      "can_reason": true,
3146      "reasoning_levels": [
3147        "low",
3148        "medium",
3149        "high"
3150      ],
3151      "default_reasoning_effort": "medium",
3152      "supports_attachments": true
3153    },
3154    {
3155      "id": "qwen/qwen3.5-27b",
3156      "name": "Qwen: Qwen3.5-27B",
3157      "cost_per_1m_in": 0.26,
3158      "cost_per_1m_out": 2.6,
3159      "cost_per_1m_in_cached": 0,
3160      "cost_per_1m_out_cached": 0,
3161      "context_window": 262144,
3162      "default_max_tokens": 40960,
3163      "can_reason": true,
3164      "reasoning_levels": [
3165        "low",
3166        "medium",
3167        "high"
3168      ],
3169      "default_reasoning_effort": "medium",
3170      "supports_attachments": true
3171    },
3172    {
3173      "id": "qwen/qwen3.5-35b-a3b",
3174      "name": "Qwen: Qwen3.5-35B-A3B",
3175      "cost_per_1m_in": 0.16,
3176      "cost_per_1m_out": 1.2,
3177      "cost_per_1m_in_cached": 0,
3178      "cost_per_1m_out_cached": 0,
3179      "context_window": 262144,
3180      "default_max_tokens": 131072,
3181      "can_reason": true,
3182      "reasoning_levels": [
3183        "low",
3184        "medium",
3185        "high"
3186      ],
3187      "default_reasoning_effort": "medium",
3188      "supports_attachments": true
3189    },
3190    {
3191      "id": "qwen/qwen3.5-9b",
3192      "name": "Qwen: Qwen3.5-9B",
3193      "cost_per_1m_in": 0.1,
3194      "cost_per_1m_out": 0.15,
3195      "cost_per_1m_in_cached": 0,
3196      "cost_per_1m_out_cached": 0,
3197      "context_window": 262144,
3198      "default_max_tokens": 26214,
3199      "can_reason": true,
3200      "reasoning_levels": [
3201        "low",
3202        "medium",
3203        "high"
3204      ],
3205      "default_reasoning_effort": "medium",
3206      "supports_attachments": true
3207    },
3208    {
3209      "id": "qwen/qwen3.5-flash-02-23",
3210      "name": "Qwen: Qwen3.5-Flash",
3211      "cost_per_1m_in": 0.065,
3212      "cost_per_1m_out": 0.26,
3213      "cost_per_1m_in_cached": 0.08125,
3214      "cost_per_1m_out_cached": 0,
3215      "context_window": 1000000,
3216      "default_max_tokens": 32768,
3217      "can_reason": true,
3218      "reasoning_levels": [
3219        "low",
3220        "medium",
3221        "high"
3222      ],
3223      "default_reasoning_effort": "medium",
3224      "supports_attachments": true
3225    },
3226    {
3227      "id": "qwen/qwen3.6-27b",
3228      "name": "Qwen: Qwen3.6 27B",
3229      "cost_per_1m_in": 0.5,
3230      "cost_per_1m_out": 2,
3231      "cost_per_1m_in_cached": 0,
3232      "cost_per_1m_out_cached": 0.25,
3233      "context_window": 262144,
3234      "default_max_tokens": 32768,
3235      "can_reason": true,
3236      "reasoning_levels": [
3237        "low",
3238        "medium",
3239        "high"
3240      ],
3241      "default_reasoning_effort": "medium",
3242      "supports_attachments": true
3243    },
3244    {
3245      "id": "qwen/qwen3.6-35b-a3b",
3246      "name": "Qwen: Qwen3.6 35B A3B",
3247      "cost_per_1m_in": 0.15,
3248      "cost_per_1m_out": 1,
3249      "cost_per_1m_in_cached": 0,
3250      "cost_per_1m_out_cached": 0.05,
3251      "context_window": 262144,
3252      "default_max_tokens": 131072,
3253      "can_reason": true,
3254      "reasoning_levels": [
3255        "low",
3256        "medium",
3257        "high"
3258      ],
3259      "default_reasoning_effort": "medium",
3260      "supports_attachments": true
3261    },
3262    {
3263      "id": "qwen/qwen3.6-flash",
3264      "name": "Qwen: Qwen3.6 Flash",
3265      "cost_per_1m_in": 0.25,
3266      "cost_per_1m_out": 1.5,
3267      "cost_per_1m_in_cached": 0.3125,
3268      "cost_per_1m_out_cached": 0,
3269      "context_window": 1000000,
3270      "default_max_tokens": 32768,
3271      "can_reason": true,
3272      "reasoning_levels": [
3273        "low",
3274        "medium",
3275        "high"
3276      ],
3277      "default_reasoning_effort": "medium",
3278      "supports_attachments": true
3279    },
3280    {
3281      "id": "qwen/qwen3.6-max-preview",
3282      "name": "Qwen: Qwen3.6 Max Preview",
3283      "cost_per_1m_in": 1.04,
3284      "cost_per_1m_out": 6.24,
3285      "cost_per_1m_in_cached": 1.3,
3286      "cost_per_1m_out_cached": 0,
3287      "context_window": 262144,
3288      "default_max_tokens": 32768,
3289      "can_reason": true,
3290      "reasoning_levels": [
3291        "low",
3292        "medium",
3293        "high"
3294      ],
3295      "default_reasoning_effort": "medium",
3296      "supports_attachments": false
3297    },
3298    {
3299      "id": "qwen/qwen3.6-plus",
3300      "name": "Qwen: Qwen3.6 Plus",
3301      "cost_per_1m_in": 0.325,
3302      "cost_per_1m_out": 1.95,
3303      "cost_per_1m_in_cached": 0.40625,
3304      "cost_per_1m_out_cached": 0,
3305      "context_window": 1000000,
3306      "default_max_tokens": 32768,
3307      "can_reason": true,
3308      "reasoning_levels": [
3309        "low",
3310        "medium",
3311        "high"
3312      ],
3313      "default_reasoning_effort": "medium",
3314      "supports_attachments": true
3315    },
3316    {
3317      "id": "relace/relace-search",
3318      "name": "Relace: Relace Search",
3319      "cost_per_1m_in": 1,
3320      "cost_per_1m_out": 3,
3321      "cost_per_1m_in_cached": 0,
3322      "cost_per_1m_out_cached": 0,
3323      "context_window": 256000,
3324      "default_max_tokens": 64000,
3325      "can_reason": false,
3326      "supports_attachments": false
3327    },
3328    {
3329      "id": "stepfun/step-3.5-flash",
3330      "name": "StepFun: Step 3.5 Flash",
3331      "cost_per_1m_in": 0.1,
3332      "cost_per_1m_out": 0.3,
3333      "cost_per_1m_in_cached": 0,
3334      "cost_per_1m_out_cached": 0,
3335      "context_window": 262144,
3336      "default_max_tokens": 32768,
3337      "can_reason": true,
3338      "reasoning_levels": [
3339        "low",
3340        "medium",
3341        "high"
3342      ],
3343      "default_reasoning_effort": "medium",
3344      "supports_attachments": false
3345    },
3346    {
3347      "id": "tencent/hy3-preview",
3348      "name": "Tencent: Hy3 preview",
3349      "cost_per_1m_in": 0.066,
3350      "cost_per_1m_out": 0.26,
3351      "cost_per_1m_in_cached": 0,
3352      "cost_per_1m_out_cached": 0.029,
3353      "context_window": 262144,
3354      "default_max_tokens": 131072,
3355      "can_reason": true,
3356      "reasoning_levels": [
3357        "low",
3358        "medium",
3359        "high"
3360      ],
3361      "default_reasoning_effort": "medium",
3362      "supports_attachments": false
3363    },
3364    {
3365      "id": "thedrummer/rocinante-12b",
3366      "name": "TheDrummer: Rocinante 12B",
3367      "cost_per_1m_in": 0.17,
3368      "cost_per_1m_out": 0.43,
3369      "cost_per_1m_in_cached": 0,
3370      "cost_per_1m_out_cached": 0,
3371      "context_window": 32768,
3372      "default_max_tokens": 16384,
3373      "can_reason": false,
3374      "supports_attachments": false
3375    },
3376    {
3377      "id": "thedrummer/unslopnemo-12b",
3378      "name": "TheDrummer: UnslopNemo 12B",
3379      "cost_per_1m_in": 0.4,
3380      "cost_per_1m_out": 0.4,
3381      "cost_per_1m_in_cached": 0,
3382      "cost_per_1m_out_cached": 0,
3383      "context_window": 32768,
3384      "default_max_tokens": 16384,
3385      "can_reason": false,
3386      "supports_attachments": false
3387    },
3388    {
3389      "id": "alibaba/tongyi-deepresearch-30b-a3b",
3390      "name": "Tongyi DeepResearch 30B A3B",
3391      "cost_per_1m_in": 0.09,
3392      "cost_per_1m_out": 0.45,
3393      "cost_per_1m_in_cached": 0,
3394      "cost_per_1m_out_cached": 0.09,
3395      "context_window": 131072,
3396      "default_max_tokens": 65536,
3397      "can_reason": true,
3398      "reasoning_levels": [
3399        "low",
3400        "medium",
3401        "high"
3402      ],
3403      "default_reasoning_effort": "medium",
3404      "supports_attachments": false
3405    },
3406    {
3407      "id": "upstage/solar-pro-3",
3408      "name": "Upstage: Solar Pro 3",
3409      "cost_per_1m_in": 0.15,
3410      "cost_per_1m_out": 0.6,
3411      "cost_per_1m_in_cached": 0,
3412      "cost_per_1m_out_cached": 0.015,
3413      "context_window": 128000,
3414      "default_max_tokens": 12800,
3415      "can_reason": true,
3416      "reasoning_levels": [
3417        "low",
3418        "medium",
3419        "high"
3420      ],
3421      "default_reasoning_effort": "medium",
3422      "supports_attachments": false
3423    },
3424    {
3425      "id": "xiaomi/mimo-v2-flash",
3426      "name": "Xiaomi: MiMo-V2-Flash",
3427      "cost_per_1m_in": 0.1,
3428      "cost_per_1m_out": 0.3,
3429      "cost_per_1m_in_cached": 0,
3430      "cost_per_1m_out_cached": 0.01,
3431      "context_window": 262144,
3432      "default_max_tokens": 32768,
3433      "can_reason": true,
3434      "reasoning_levels": [
3435        "low",
3436        "medium",
3437        "high"
3438      ],
3439      "default_reasoning_effort": "medium",
3440      "supports_attachments": false
3441    },
3442    {
3443      "id": "xiaomi/mimo-v2-omni",
3444      "name": "Xiaomi: MiMo-V2-Omni",
3445      "cost_per_1m_in": 0.4,
3446      "cost_per_1m_out": 2,
3447      "cost_per_1m_in_cached": 0,
3448      "cost_per_1m_out_cached": 0.08,
3449      "context_window": 262144,
3450      "default_max_tokens": 32768,
3451      "can_reason": true,
3452      "reasoning_levels": [
3453        "low",
3454        "medium",
3455        "high"
3456      ],
3457      "default_reasoning_effort": "medium",
3458      "supports_attachments": true
3459    },
3460    {
3461      "id": "xiaomi/mimo-v2-pro",
3462      "name": "Xiaomi: MiMo-V2-Pro",
3463      "cost_per_1m_in": 1,
3464      "cost_per_1m_out": 3,
3465      "cost_per_1m_in_cached": 0,
3466      "cost_per_1m_out_cached": 0.2,
3467      "context_window": 1048576,
3468      "default_max_tokens": 65536,
3469      "can_reason": true,
3470      "reasoning_levels": [
3471        "low",
3472        "medium",
3473        "high"
3474      ],
3475      "default_reasoning_effort": "medium",
3476      "supports_attachments": false
3477    },
3478    {
3479      "id": "xiaomi/mimo-v2.5",
3480      "name": "Xiaomi: MiMo-V2.5",
3481      "cost_per_1m_in": 0.4,
3482      "cost_per_1m_out": 2,
3483      "cost_per_1m_in_cached": 0,
3484      "cost_per_1m_out_cached": 0.08,
3485      "context_window": 1048576,
3486      "default_max_tokens": 65536,
3487      "can_reason": true,
3488      "reasoning_levels": [
3489        "low",
3490        "medium",
3491        "high"
3492      ],
3493      "default_reasoning_effort": "medium",
3494      "supports_attachments": true
3495    },
3496    {
3497      "id": "xiaomi/mimo-v2.5-pro",
3498      "name": "Xiaomi: MiMo-V2.5-Pro",
3499      "cost_per_1m_in": 1,
3500      "cost_per_1m_out": 3,
3501      "cost_per_1m_in_cached": 0,
3502      "cost_per_1m_out_cached": 0.2,
3503      "context_window": 1048576,
3504      "default_max_tokens": 65536,
3505      "can_reason": true,
3506      "reasoning_levels": [
3507        "low",
3508        "medium",
3509        "high"
3510      ],
3511      "default_reasoning_effort": "medium",
3512      "supports_attachments": false
3513    },
3514    {
3515      "id": "z-ai/glm-4-32b",
3516      "name": "Z.ai: GLM 4 32B ",
3517      "cost_per_1m_in": 0.1,
3518      "cost_per_1m_out": 0.1,
3519      "cost_per_1m_in_cached": 0,
3520      "cost_per_1m_out_cached": 0,
3521      "context_window": 128000,
3522      "default_max_tokens": 12800,
3523      "can_reason": false,
3524      "supports_attachments": false
3525    },
3526    {
3527      "id": "z-ai/glm-4.5",
3528      "name": "Z.ai: GLM 4.5",
3529      "cost_per_1m_in": 0.6,
3530      "cost_per_1m_out": 2.2,
3531      "cost_per_1m_in_cached": 0,
3532      "cost_per_1m_out_cached": 0.11,
3533      "context_window": 131072,
3534      "default_max_tokens": 49152,
3535      "can_reason": true,
3536      "reasoning_levels": [
3537        "low",
3538        "medium",
3539        "high"
3540      ],
3541      "default_reasoning_effort": "medium",
3542      "supports_attachments": false
3543    },
3544    {
3545      "id": "z-ai/glm-4.5-air",
3546      "name": "Z.ai: GLM 4.5 Air",
3547      "cost_per_1m_in": 0.2,
3548      "cost_per_1m_out": 1.1,
3549      "cost_per_1m_in_cached": 0,
3550      "cost_per_1m_out_cached": 0.03,
3551      "context_window": 131072,
3552      "default_max_tokens": 48000,
3553      "can_reason": true,
3554      "reasoning_levels": [
3555        "low",
3556        "medium",
3557        "high"
3558      ],
3559      "default_reasoning_effort": "medium",
3560      "supports_attachments": false
3561    },
3562    {
3563      "id": "z-ai/glm-4.5-air:free",
3564      "name": "Z.ai: GLM 4.5 Air (free)",
3565      "cost_per_1m_in": 0,
3566      "cost_per_1m_out": 0,
3567      "cost_per_1m_in_cached": 0,
3568      "cost_per_1m_out_cached": 0,
3569      "context_window": 131072,
3570      "default_max_tokens": 48000,
3571      "can_reason": true,
3572      "reasoning_levels": [
3573        "low",
3574        "medium",
3575        "high"
3576      ],
3577      "default_reasoning_effort": "medium",
3578      "supports_attachments": false
3579    },
3580    {
3581      "id": "z-ai/glm-4.5v",
3582      "name": "Z.ai: GLM 4.5V",
3583      "cost_per_1m_in": 0.6,
3584      "cost_per_1m_out": 1.8,
3585      "cost_per_1m_in_cached": 0,
3586      "cost_per_1m_out_cached": 0.11,
3587      "context_window": 65536,
3588      "default_max_tokens": 8192,
3589      "can_reason": true,
3590      "reasoning_levels": [
3591        "low",
3592        "medium",
3593        "high"
3594      ],
3595      "default_reasoning_effort": "medium",
3596      "supports_attachments": true
3597    },
3598    {
3599      "id": "z-ai/glm-4.6",
3600      "name": "Z.ai: GLM 4.6",
3601      "cost_per_1m_in": 0.39,
3602      "cost_per_1m_out": 1.9,
3603      "cost_per_1m_in_cached": 0,
3604      "cost_per_1m_out_cached": 0,
3605      "context_window": 204800,
3606      "default_max_tokens": 102400,
3607      "can_reason": true,
3608      "reasoning_levels": [
3609        "low",
3610        "medium",
3611        "high"
3612      ],
3613      "default_reasoning_effort": "medium",
3614      "supports_attachments": false
3615    },
3616    {
3617      "id": "z-ai/glm-4.6v",
3618      "name": "Z.ai: GLM 4.6V",
3619      "cost_per_1m_in": 0.3,
3620      "cost_per_1m_out": 0.9,
3621      "cost_per_1m_in_cached": 0,
3622      "cost_per_1m_out_cached": 0.05,
3623      "context_window": 131072,
3624      "default_max_tokens": 12000,
3625      "can_reason": true,
3626      "reasoning_levels": [
3627        "low",
3628        "medium",
3629        "high"
3630      ],
3631      "default_reasoning_effort": "medium",
3632      "supports_attachments": true
3633    },
3634    {
3635      "id": "z-ai/glm-4.7",
3636      "name": "Z.ai: GLM 4.7",
3637      "cost_per_1m_in": 0.45,
3638      "cost_per_1m_out": 2.2,
3639      "cost_per_1m_in_cached": 0,
3640      "cost_per_1m_out_cached": 0.11,
3641      "context_window": 204800,
3642      "default_max_tokens": 102400,
3643      "can_reason": true,
3644      "reasoning_levels": [
3645        "low",
3646        "medium",
3647        "high"
3648      ],
3649      "default_reasoning_effort": "medium",
3650      "supports_attachments": false
3651    },
3652    {
3653      "id": "z-ai/glm-4.7-flash",
3654      "name": "Z.ai: GLM 4.7 Flash",
3655      "cost_per_1m_in": 0.1,
3656      "cost_per_1m_out": 0.43,
3657      "cost_per_1m_in_cached": 0,
3658      "cost_per_1m_out_cached": 0,
3659      "context_window": 202752,
3660      "default_max_tokens": 101376,
3661      "can_reason": true,
3662      "reasoning_levels": [
3663        "low",
3664        "medium",
3665        "high"
3666      ],
3667      "default_reasoning_effort": "medium",
3668      "supports_attachments": false
3669    },
3670    {
3671      "id": "z-ai/glm-5",
3672      "name": "Z.ai: GLM 5",
3673      "cost_per_1m_in": 0.95,
3674      "cost_per_1m_out": 2.55,
3675      "cost_per_1m_in_cached": 0,
3676      "cost_per_1m_out_cached": 0.2,
3677      "context_window": 204800,
3678      "default_max_tokens": 65536,
3679      "can_reason": true,
3680      "reasoning_levels": [
3681        "low",
3682        "medium",
3683        "high"
3684      ],
3685      "default_reasoning_effort": "medium",
3686      "supports_attachments": false
3687    },
3688    {
3689      "id": "z-ai/glm-5-turbo",
3690      "name": "Z.ai: GLM 5 Turbo",
3691      "cost_per_1m_in": 1.2,
3692      "cost_per_1m_out": 4,
3693      "cost_per_1m_in_cached": 0,
3694      "cost_per_1m_out_cached": 0.24,
3695      "context_window": 262144,
3696      "default_max_tokens": 65536,
3697      "can_reason": true,
3698      "reasoning_levels": [
3699        "low",
3700        "medium",
3701        "high"
3702      ],
3703      "default_reasoning_effort": "medium",
3704      "supports_attachments": false
3705    },
3706    {
3707      "id": "z-ai/glm-5.1",
3708      "name": "Z.ai: GLM 5.1",
3709      "cost_per_1m_in": 1.4,
3710      "cost_per_1m_out": 4.4,
3711      "cost_per_1m_in_cached": 0,
3712      "cost_per_1m_out_cached": 0.26,
3713      "context_window": 204800,
3714      "default_max_tokens": 65536,
3715      "can_reason": true,
3716      "reasoning_levels": [
3717        "low",
3718        "medium",
3719        "high"
3720      ],
3721      "default_reasoning_effort": "medium",
3722      "supports_attachments": false
3723    },
3724    {
3725      "id": "z-ai/glm-5v-turbo",
3726      "name": "Z.ai: GLM 5V Turbo",
3727      "cost_per_1m_in": 1.2,
3728      "cost_per_1m_out": 4,
3729      "cost_per_1m_in_cached": 0,
3730      "cost_per_1m_out_cached": 0.24,
3731      "context_window": 202752,
3732      "default_max_tokens": 65536,
3733      "can_reason": true,
3734      "reasoning_levels": [
3735        "low",
3736        "medium",
3737        "high"
3738      ],
3739      "default_reasoning_effort": "medium",
3740      "supports_attachments": true
3741    },
3742    {
3743      "id": "inclusionai/ling-2.6-1t",
3744      "name": "inclusionAI: Ling-2.6-1T",
3745      "cost_per_1m_in": 0.3,
3746      "cost_per_1m_out": 2.5,
3747      "cost_per_1m_in_cached": 0,
3748      "cost_per_1m_out_cached": 0.06,
3749      "context_window": 262144,
3750      "default_max_tokens": 16384,
3751      "can_reason": false,
3752      "supports_attachments": false
3753    },
3754    {
3755      "id": "inclusionai/ling-2.6-flash",
3756      "name": "inclusionAI: Ling-2.6-flash",
3757      "cost_per_1m_in": 0.08,
3758      "cost_per_1m_out": 0.24,
3759      "cost_per_1m_in_cached": 0,
3760      "cost_per_1m_out_cached": 0.016,
3761      "context_window": 262144,
3762      "default_max_tokens": 16384,
3763      "can_reason": false,
3764      "supports_attachments": false
3765    },
3766    {
3767      "id": "inclusionai/ring-2.6-1t:free",
3768      "name": "inclusionAI: Ring-2.6-1T (free)",
3769      "cost_per_1m_in": 0,
3770      "cost_per_1m_out": 0,
3771      "cost_per_1m_in_cached": 0,
3772      "cost_per_1m_out_cached": 0,
3773      "context_window": 262144,
3774      "default_max_tokens": 32768,
3775      "can_reason": true,
3776      "reasoning_levels": [
3777        "low",
3778        "medium",
3779        "high"
3780      ],
3781      "default_reasoning_effort": "medium",
3782      "supports_attachments": false
3783    },
3784    {
3785      "id": "x-ai/grok-3",
3786      "name": "xAI: Grok 3",
3787      "cost_per_1m_in": 5,
3788      "cost_per_1m_out": 25,
3789      "cost_per_1m_in_cached": 0,
3790      "cost_per_1m_out_cached": 1.25,
3791      "context_window": 131072,
3792      "default_max_tokens": 13107,
3793      "can_reason": false,
3794      "supports_attachments": false
3795    },
3796    {
3797      "id": "x-ai/grok-3-beta",
3798      "name": "xAI: Grok 3 Beta",
3799      "cost_per_1m_in": 5,
3800      "cost_per_1m_out": 25,
3801      "cost_per_1m_in_cached": 0,
3802      "cost_per_1m_out_cached": 1.25,
3803      "context_window": 131072,
3804      "default_max_tokens": 13107,
3805      "can_reason": false,
3806      "supports_attachments": false
3807    },
3808    {
3809      "id": "x-ai/grok-3-mini",
3810      "name": "xAI: Grok 3 Mini",
3811      "cost_per_1m_in": 0.3,
3812      "cost_per_1m_out": 0.5,
3813      "cost_per_1m_in_cached": 0,
3814      "cost_per_1m_out_cached": 0.075,
3815      "context_window": 131072,
3816      "default_max_tokens": 13107,
3817      "can_reason": true,
3818      "reasoning_levels": [
3819        "low",
3820        "medium",
3821        "high"
3822      ],
3823      "default_reasoning_effort": "medium",
3824      "supports_attachments": false
3825    },
3826    {
3827      "id": "x-ai/grok-3-mini-beta",
3828      "name": "xAI: Grok 3 Mini Beta",
3829      "cost_per_1m_in": 0.3,
3830      "cost_per_1m_out": 0.5,
3831      "cost_per_1m_in_cached": 0,
3832      "cost_per_1m_out_cached": 0.075,
3833      "context_window": 131072,
3834      "default_max_tokens": 13107,
3835      "can_reason": true,
3836      "reasoning_levels": [
3837        "low",
3838        "medium",
3839        "high"
3840      ],
3841      "default_reasoning_effort": "medium",
3842      "supports_attachments": false
3843    },
3844    {
3845      "id": "x-ai/grok-4",
3846      "name": "xAI: Grok 4",
3847      "cost_per_1m_in": 3,
3848      "cost_per_1m_out": 15,
3849      "cost_per_1m_in_cached": 0,
3850      "cost_per_1m_out_cached": 0.75,
3851      "context_window": 256000,
3852      "default_max_tokens": 25600,
3853      "can_reason": true,
3854      "reasoning_levels": [
3855        "low",
3856        "medium",
3857        "high"
3858      ],
3859      "default_reasoning_effort": "medium",
3860      "supports_attachments": true
3861    },
3862    {
3863      "id": "x-ai/grok-4-fast",
3864      "name": "xAI: Grok 4 Fast",
3865      "cost_per_1m_in": 0.2,
3866      "cost_per_1m_out": 0.5,
3867      "cost_per_1m_in_cached": 0,
3868      "cost_per_1m_out_cached": 0.05,
3869      "context_window": 2000000,
3870      "default_max_tokens": 15000,
3871      "can_reason": true,
3872      "reasoning_levels": [
3873        "low",
3874        "medium",
3875        "high"
3876      ],
3877      "default_reasoning_effort": "medium",
3878      "supports_attachments": true
3879    },
3880    {
3881      "id": "x-ai/grok-4.1-fast",
3882      "name": "xAI: Grok 4.1 Fast",
3883      "cost_per_1m_in": 0.2,
3884      "cost_per_1m_out": 0.5,
3885      "cost_per_1m_in_cached": 0,
3886      "cost_per_1m_out_cached": 0.05,
3887      "context_window": 2000000,
3888      "default_max_tokens": 15000,
3889      "can_reason": true,
3890      "reasoning_levels": [
3891        "low",
3892        "medium",
3893        "high"
3894      ],
3895      "default_reasoning_effort": "medium",
3896      "supports_attachments": true
3897    },
3898    {
3899      "id": "x-ai/grok-4.20",
3900      "name": "xAI: Grok 4.20",
3901      "cost_per_1m_in": 1.25,
3902      "cost_per_1m_out": 2.5,
3903      "cost_per_1m_in_cached": 0,
3904      "cost_per_1m_out_cached": 0.2,
3905      "context_window": 2000000,
3906      "default_max_tokens": 200000,
3907      "can_reason": true,
3908      "reasoning_levels": [
3909        "low",
3910        "medium",
3911        "high"
3912      ],
3913      "default_reasoning_effort": "medium",
3914      "supports_attachments": true
3915    },
3916    {
3917      "id": "x-ai/grok-4.3",
3918      "name": "xAI: Grok 4.3",
3919      "cost_per_1m_in": 1.25,
3920      "cost_per_1m_out": 2.5,
3921      "cost_per_1m_in_cached": 0,
3922      "cost_per_1m_out_cached": 0.2,
3923      "context_window": 1000000,
3924      "default_max_tokens": 100000,
3925      "can_reason": true,
3926      "reasoning_levels": [
3927        "low",
3928        "medium",
3929        "high"
3930      ],
3931      "default_reasoning_effort": "medium",
3932      "supports_attachments": true
3933    },
3934    {
3935      "id": "x-ai/grok-code-fast-1",
3936      "name": "xAI: Grok Code Fast 1",
3937      "cost_per_1m_in": 0.2,
3938      "cost_per_1m_out": 1.5,
3939      "cost_per_1m_in_cached": 0,
3940      "cost_per_1m_out_cached": 0.02,
3941      "context_window": 256000,
3942      "default_max_tokens": 5000,
3943      "can_reason": true,
3944      "reasoning_levels": [
3945        "low",
3946        "medium",
3947        "high"
3948      ],
3949      "default_reasoning_effort": "medium",
3950      "supports_attachments": false
3951    }
3952  ],
3953  "default_headers": {
3954    "HTTP-Referer": "https://charm.land",
3955    "X-Title": "Crush"
3956  }
3957}