openrouter.json

{
  "name": "OpenRouter",
  "id": "openrouter",
  "api_key": "$OPENROUTER_API_KEY",
  "api_endpoint": "https://openrouter.ai/api/v1",
  "type": "openrouter",
  "default_large_model_id": "anthropic/claude-sonnet-4",
  "default_small_model_id": "anthropic/claude-3.5-haiku",
  "models": [
    {
      "id": "ai21/jamba-large-1.7",
      "name": "AI21: Jamba Large 1.7",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "ai21/jamba-mini-1.7",
      "name": "AI21: Jamba Mini 1.7",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "allenai/olmo-3-7b-instruct",
      "name": "AllenAI: Olmo 3 7B Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "amazon/nova-lite-v1",
      "name": "Amazon: Nova Lite 1.0",
      "cost_per_1m_in": 0.06,
      "cost_per_1m_out": 0.24,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-micro-v1",
      "name": "Amazon: Nova Micro 1.0",
      "cost_per_1m_in": 0.035,
      "cost_per_1m_out": 0.14,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "amazon/nova-premier-v1",
      "name": "Amazon: Nova Premier 1.0",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 12.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.625,
      "context_window": 1000000,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-pro-v1",
      "name": "Amazon: Nova Pro 1.0",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 3.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3-haiku",
      "name": "Anthropic: Claude 3 Haiku",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1.25,
      "cost_per_1m_in_cached": 0.3,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3-opus",
      "name": "Anthropic: Claude 3 Opus",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-haiku",
      "name": "Anthropic: Claude 3.5 Haiku",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-haiku-20241022",
      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-sonnet",
      "name": "Anthropic: Claude 3.5 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.7-sonnet",
      "name": "Anthropic: Claude 3.7 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.7-sonnet:thinking",
      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-haiku-4.5",
      "name": "Anthropic: Claude Haiku 4.5",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 1.25,
      "cost_per_1m_out_cached": 0.1,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4",
      "name": "Anthropic: Claude Opus 4",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4.1",
      "name": "Anthropic: Claude Opus 4.1",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4.5",
      "name": "Anthropic: Claude Opus 4.5",
      "cost_per_1m_in": 5,
      "cost_per_1m_out": 25,
      "cost_per_1m_in_cached": 6.25,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-sonnet-4",
      "name": "Anthropic: Claude Sonnet 4",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-sonnet-4.5",
      "name": "Anthropic: Claude Sonnet 4.5",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "arcee-ai/virtuoso-large",
      "name": "Arcee AI: Virtuoso Large",
      "cost_per_1m_in": 0.75,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "baidu/ernie-4.5-21b-a3b",
      "name": "Baidu: ERNIE 4.5 21B A3B",
      "cost_per_1m_in": 0.056,
      "cost_per_1m_out": 0.224,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 120000,
      "default_max_tokens": 4000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "baidu/ernie-4.5-vl-28b-a3b",
      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
      "cost_per_1m_in": 0.112,
      "cost_per_1m_out": 0.448,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 30000,
      "default_max_tokens": 4000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
      "name": "Cogito V2 Preview Llama 109B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.59,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32767,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "cohere/command-r-08-2024",
      "name": "Cohere: Command R (08-2024)",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "cohere/command-r-plus-08-2024",
      "name": "Cohere: Command R+ (08-2024)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-405b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
      "cost_per_1m_in": 3.5,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-70b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
      "cost_per_1m_in": 0.88,
      "cost_per_1m_out": 0.88,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat",
      "name": "DeepSeek: DeepSeek V3",
      "cost_per_1m_in": 0.32,
      "cost_per_1m_out": 1.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 64000,
      "default_max_tokens": 8000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3-0324",
      "name": "DeepSeek: DeepSeek V3 0324",
      "cost_per_1m_in": 0.216,
      "cost_per_1m_out": 0.896,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.135,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3.1",
      "name": "DeepSeek: DeepSeek V3.1",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus",
      "name": "DeepSeek: DeepSeek V3.1 Terminus",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus:exacto",
      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.2-exp",
      "name": "DeepSeek: DeepSeek V3.2 Exp",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1",
      "name": "DeepSeek: R1",
      "cost_per_1m_in": 0.7,
      "cost_per_1m_out": 2.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1-0528",
      "name": "DeepSeek: R1 0528",
      "cost_per_1m_in": 0.56,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1-distill-llama-70b",
      "name": "DeepSeek: R1 Distill Llama 70B",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.13,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-001",
      "name": "Google: Gemini 2.0 Flash",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-exp:free",
      "name": "Google: Gemini 2.0 Flash Experimental (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-lite-001",
      "name": "Google: Gemini 2.0 Flash Lite",
      "cost_per_1m_in": 0.075,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash",
      "name": "Google: Gemini 2.5 Flash",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-lite",
      "name": "Google: Gemini 2.5 Flash Lite",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.01,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro",
      "name": "Google: Gemini 2.5 Pro",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro-preview-05-06",
      "name": "Google: Gemini 2.5 Pro Preview 05-06",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro-preview",
      "name": "Google: Gemini 2.5 Pro Preview 06-05",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-3-pro-preview",
      "name": "Google: Gemini 3 Pro Preview",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 12,
      "cost_per_1m_in_cached": 2.375,
      "cost_per_1m_out_cached": 0.2,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemma-3-27b-it",
      "name": "Google: Gemma 3 27B",
      "cost_per_1m_in": 0.13,
      "cost_per_1m_out": 0.52,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 96000,
      "default_max_tokens": 48000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "inception/mercury",
      "name": "Inception: Mercury",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "inception/mercury-coder",
      "name": "Inception: Mercury Coder",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "kwaipilot/kat-coder-pro:free",
      "name": "Kwaipilot: KAT-Coder-Pro V1 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meituan/longcat-flash-chat:free",
      "name": "Meituan: LongCat Flash Chat (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-405b-instruct",
      "name": "Meta: Llama 3.1 405B Instruct",
      "cost_per_1m_in": 3.5,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 130815,
      "default_max_tokens": 13081,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-70b-instruct",
      "name": "Meta: Llama 3.1 70B Instruct",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-8b-instruct",
      "name": "Meta: Llama 3.1 8B Instruct",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.05,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.2-3b-instruct",
      "name": "Meta: Llama 3.2 3B Instruct",
      "cost_per_1m_in": 0.024,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct",
      "name": "Meta: Llama 3.3 70B Instruct",
      "cost_per_1m_in": 0.13,
      "cost_per_1m_out": 0.38,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct:free",
      "name": "Meta: Llama 3.3 70B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-4-maverick",
      "name": "Meta: Llama 4 Maverick",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 0.85,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "meta-llama/llama-4-scout",
      "name": "Meta: Llama 4 Scout",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.7,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1310720,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "microsoft/phi-3-medium-128k-instruct",
      "name": "Microsoft: Phi-3 Medium 128K Instruct",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "microsoft/phi-3-mini-128k-instruct",
      "name": "Microsoft: Phi-3 Mini 128K Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "microsoft/phi-3.5-mini-128k-instruct",
      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "minimax/minimax-m2",
      "name": "MiniMax: MiniMax M2",
      "cost_per_1m_in": 0.255,
      "cost_per_1m_out": 1.02,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 204800,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large",
      "name": "Mistral Large",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2407",
      "name": "Mistral Large 2407",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2411",
      "name": "Mistral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small",
      "name": "Mistral Small",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-tiny",
      "name": "Mistral Tiny",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.25,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/codestral-2501",
      "name": "Mistral: Codestral 2501",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/codestral-2508",
      "name": "Mistral: Codestral 2508",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/devstral-medium",
      "name": "Mistral: Devstral Medium",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/devstral-small",
      "name": "Mistral: Devstral Small 1.1",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-medium-2506",
      "name": "Mistral: Magistral Medium 2506",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-medium-2506:thinking",
      "name": "Mistral: Magistral Medium 2506 (thinking)",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-small-2506",
      "name": "Mistral: Magistral Small 2506",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40000,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/ministral-3b",
      "name": "Mistral: Ministral 3B",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/ministral-8b",
      "name": "Mistral: Ministral 8B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-7b-instruct",
      "name": "Mistral: Mistral 7B Instruct",
      "cost_per_1m_in": 0.028,
      "cost_per_1m_out": 0.054,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-7b-instruct:free",
      "name": "Mistral: Mistral 7B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-medium-3",
      "name": "Mistral: Mistral Medium 3",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-medium-3.1",
      "name": "Mistral: Mistral Medium 3.1",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-nemo",
      "name": "Mistral: Mistral Nemo",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-24b-instruct-2501",
      "name": "Mistral: Mistral Small 3",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct",
      "name": "Mistral: Mistral Small 3.1 24B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
      "name": "Mistral: Mistral Small 3.1 24B (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.2-24b-instruct",
      "name": "Mistral: Mistral Small 3.2 24B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mixtral-8x22b-instruct",
      "name": "Mistral: Mixtral 8x22B Instruct",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 6553,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mixtral-8x7b-instruct",
      "name": "Mistral: Mixtral 8x7B Instruct",
      "cost_per_1m_in": 0.54,
      "cost_per_1m_out": 0.54,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/pixtral-large-2411",
      "name": "Mistral: Pixtral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-saba",
      "name": "Mistral: Saba",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/voxtral-small-24b-2507",
      "name": "Mistral: Voxtral Small 24B 2507",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32000,
      "default_max_tokens": 3200,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2",
      "name": "MoonshotAI: Kimi K2 0711",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-0905",
      "name": "MoonshotAI: Kimi K2 0905",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 131072,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-0905:exacto",
      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
      "cost_per_1m_in": 0.6,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-thinking",
      "name": "MoonshotAI: Kimi K2 Thinking",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 131072,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
      "cost_per_1m_in": 1.2,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2",
      "name": "NVIDIA: Nemotron Nano 9B V2",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.16,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2:free",
      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/deephermes-3-mistral-24b-preview",
      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
      "cost_per_1m_in": 0.05,
      "cost_per_1m_out": 0.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/hermes-4-405b",
      "name": "Nous: Hermes 4 405B",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/hermes-4-70b",
      "name": "Nous: Hermes 4 70B",
      "cost_per_1m_in": 0.11,
      "cost_per_1m_out": 0.38,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/codex-mini",
      "name": "OpenAI: Codex Mini",
      "cost_per_1m_in": 1.5,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.375,
      "context_window": 200000,
      "default_max_tokens": 50000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4-turbo",
      "name": "OpenAI: GPT-4 Turbo",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4-1106-preview",
      "name": "OpenAI: GPT-4 Turbo (older v1106)",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4-turbo-preview",
      "name": "OpenAI: GPT-4 Turbo Preview",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1",
      "name": "OpenAI: GPT-4.1",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1-mini",
      "name": "OpenAI: GPT-4.1 Mini",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 1.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.1,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1-nano",
      "name": "OpenAI: GPT-4.1 Nano",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o",
      "name": "OpenAI: GPT-4o",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-2024-05-13",
      "name": "OpenAI: GPT-4o (2024-05-13)",
      "cost_per_1m_in": 5,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-2024-08-06",
      "name": "OpenAI: GPT-4o (2024-08-06)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 1.25,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
1745    {
1746      "id": "openai/gpt-4o-2024-11-20",
1747      "name": "OpenAI: GPT-4o (2024-11-20)",
1748      "cost_per_1m_in": 2.5,
1749      "cost_per_1m_out": 10,
1750      "cost_per_1m_in_cached": 0,
1751      "cost_per_1m_out_cached": 1.25,
1752      "context_window": 128000,
1753      "default_max_tokens": 8192,
1754      "can_reason": false,
1755      "supports_attachments": true,
1756      "options": {}
1757    },
1758    {
1759      "id": "openai/gpt-4o:extended",
1760      "name": "OpenAI: GPT-4o (extended)",
1761      "cost_per_1m_in": 6,
1762      "cost_per_1m_out": 18,
1763      "cost_per_1m_in_cached": 0,
1764      "cost_per_1m_out_cached": 0,
1765      "context_window": 128000,
1766      "default_max_tokens": 32000,
1767      "can_reason": false,
1768      "supports_attachments": true,
1769      "options": {}
1770    },
1771    {
1772      "id": "openai/gpt-4o-audio-preview",
1773      "name": "OpenAI: GPT-4o Audio",
1774      "cost_per_1m_in": 2.5,
1775      "cost_per_1m_out": 10,
1776      "cost_per_1m_in_cached": 0,
1777      "cost_per_1m_out_cached": 0,
1778      "context_window": 128000,
1779      "default_max_tokens": 8192,
1780      "can_reason": false,
1781      "supports_attachments": false,
1782      "options": {}
1783    },
1784    {
1785      "id": "openai/gpt-4o-mini",
1786      "name": "OpenAI: GPT-4o-mini",
1787      "cost_per_1m_in": 0.15,
1788      "cost_per_1m_out": 0.6,
1789      "cost_per_1m_in_cached": 0,
1790      "cost_per_1m_out_cached": 0.075,
1791      "context_window": 128000,
1792      "default_max_tokens": 8192,
1793      "can_reason": false,
1794      "supports_attachments": true,
1795      "options": {}
1796    },
1797    {
1798      "id": "openai/gpt-4o-mini-2024-07-18",
1799      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1800      "cost_per_1m_in": 0.15,
1801      "cost_per_1m_out": 0.6,
1802      "cost_per_1m_in_cached": 0,
1803      "cost_per_1m_out_cached": 0.075,
1804      "context_window": 128000,
1805      "default_max_tokens": 8192,
1806      "can_reason": false,
1807      "supports_attachments": true,
1808      "options": {}
1809    },
1810    {
1811      "id": "openai/gpt-5",
1812      "name": "OpenAI: GPT-5",
1813      "cost_per_1m_in": 1.25,
1814      "cost_per_1m_out": 10,
1815      "cost_per_1m_in_cached": 0,
1816      "cost_per_1m_out_cached": 0.125,
1817      "context_window": 400000,
1818      "default_max_tokens": 64000,
1819      "can_reason": true,
1820      "reasoning_levels": [
1821        "low",
1822        "medium",
1823        "high"
1824      ],
1825      "default_reasoning_effort": "medium",
1826      "supports_attachments": true,
1827      "options": {}
1828    },
1829    {
1830      "id": "openai/gpt-5-codex",
1831      "name": "OpenAI: GPT-5 Codex",
1832      "cost_per_1m_in": 1.25,
1833      "cost_per_1m_out": 10,
1834      "cost_per_1m_in_cached": 0,
1835      "cost_per_1m_out_cached": 0.125,
1836      "context_window": 400000,
1837      "default_max_tokens": 64000,
1838      "can_reason": true,
1839      "reasoning_levels": [
1840        "low",
1841        "medium",
1842        "high"
1843      ],
1844      "default_reasoning_effort": "medium",
1845      "supports_attachments": true,
1846      "options": {}
1847    },
1848    {
1849      "id": "openai/gpt-5-image",
1850      "name": "OpenAI: GPT-5 Image",
1851      "cost_per_1m_in": 10,
1852      "cost_per_1m_out": 10,
1853      "cost_per_1m_in_cached": 0,
1854      "cost_per_1m_out_cached": 1.25,
1855      "context_window": 400000,
1856      "default_max_tokens": 64000,
1857      "can_reason": true,
1858      "reasoning_levels": [
1859        "low",
1860        "medium",
1861        "high"
1862      ],
1863      "default_reasoning_effort": "medium",
1864      "supports_attachments": true,
1865      "options": {}
1866    },
1867    {
1868      "id": "openai/gpt-5-image-mini",
1869      "name": "OpenAI: GPT-5 Image Mini",
1870      "cost_per_1m_in": 2.5,
1871      "cost_per_1m_out": 2,
1872      "cost_per_1m_in_cached": 0,
1873      "cost_per_1m_out_cached": 0.25,
1874      "context_window": 400000,
1875      "default_max_tokens": 64000,
1876      "can_reason": true,
1877      "reasoning_levels": [
1878        "low",
1879        "medium",
1880        "high"
1881      ],
1882      "default_reasoning_effort": "medium",
1883      "supports_attachments": true,
1884      "options": {}
1885    },
1886    {
1887      "id": "openai/gpt-5-mini",
1888      "name": "OpenAI: GPT-5 Mini",
1889      "cost_per_1m_in": 0.25,
1890      "cost_per_1m_out": 2,
1891      "cost_per_1m_in_cached": 0,
1892      "cost_per_1m_out_cached": 0.03,
1893      "context_window": 400000,
1894      "default_max_tokens": 40000,
1895      "can_reason": true,
1896      "reasoning_levels": [
1897        "low",
1898        "medium",
1899        "high"
1900      ],
1901      "default_reasoning_effort": "medium",
1902      "supports_attachments": true,
1903      "options": {}
1904    },
1905    {
1906      "id": "openai/gpt-5-nano",
1907      "name": "OpenAI: GPT-5 Nano",
1908      "cost_per_1m_in": 0.049999999999999996,
1909      "cost_per_1m_out": 0.39999999999999997,
1910      "cost_per_1m_in_cached": 0,
1911      "cost_per_1m_out_cached": 0.01,
1912      "context_window": 400000,
1913      "default_max_tokens": 40000,
1914      "can_reason": true,
1915      "reasoning_levels": [
1916        "low",
1917        "medium",
1918        "high"
1919      ],
1920      "default_reasoning_effort": "medium",
1921      "supports_attachments": true,
1922      "options": {}
1923    },
1924    {
1925      "id": "openai/gpt-5-pro",
1926      "name": "OpenAI: GPT-5 Pro",
1927      "cost_per_1m_in": 15,
1928      "cost_per_1m_out": 120,
1929      "cost_per_1m_in_cached": 0,
1930      "cost_per_1m_out_cached": 0,
1931      "context_window": 400000,
1932      "default_max_tokens": 64000,
1933      "can_reason": true,
1934      "reasoning_levels": [
1935        "low",
1936        "medium",
1937        "high"
1938      ],
1939      "default_reasoning_effort": "medium",
1940      "supports_attachments": true,
1941      "options": {}
1942    },
1943    {
1944      "id": "openai/gpt-5.1",
1945      "name": "OpenAI: GPT-5.1",
1946      "cost_per_1m_in": 1.25,
1947      "cost_per_1m_out": 10,
1948      "cost_per_1m_in_cached": 0,
1949      "cost_per_1m_out_cached": 0.125,
1950      "context_window": 400000,
1951      "default_max_tokens": 64000,
1952      "can_reason": true,
1953      "reasoning_levels": [
1954        "low",
1955        "medium",
1956        "high"
1957      ],
1958      "default_reasoning_effort": "medium",
1959      "supports_attachments": true,
1960      "options": {}
1961    },
1962    {
1963      "id": "openai/gpt-5.1-chat",
1964      "name": "OpenAI: GPT-5.1 Chat",
1965      "cost_per_1m_in": 1.25,
1966      "cost_per_1m_out": 10,
1967      "cost_per_1m_in_cached": 0,
1968      "cost_per_1m_out_cached": 0.125,
1969      "context_window": 128000,
1970      "default_max_tokens": 8192,
1971      "can_reason": false,
1972      "supports_attachments": true,
1973      "options": {}
1974    },
1975    {
1976      "id": "openai/gpt-5.1-codex",
1977      "name": "OpenAI: GPT-5.1-Codex",
1978      "cost_per_1m_in": 1.25,
1979      "cost_per_1m_out": 10,
1980      "cost_per_1m_in_cached": 0,
1981      "cost_per_1m_out_cached": 0.125,
1982      "context_window": 400000,
1983      "default_max_tokens": 64000,
1984      "can_reason": true,
1985      "reasoning_levels": [
1986        "low",
1987        "medium",
1988        "high"
1989      ],
1990      "default_reasoning_effort": "medium",
1991      "supports_attachments": true,
1992      "options": {}
1993    },
1994    {
1995      "id": "openai/gpt-5.1-codex-mini",
1996      "name": "OpenAI: GPT-5.1-Codex-Mini",
1997      "cost_per_1m_in": 0.25,
1998      "cost_per_1m_out": 2,
1999      "cost_per_1m_in_cached": 0,
2000      "cost_per_1m_out_cached": 0.024999999999999998,
2001      "context_window": 400000,
2002      "default_max_tokens": 50000,
2003      "can_reason": true,
2004      "reasoning_levels": [
2005        "low",
2006        "medium",
2007        "high"
2008      ],
2009      "default_reasoning_effort": "medium",
2010      "supports_attachments": true,
2011      "options": {}
2012    },
2013    {
2014      "id": "openai/gpt-oss-120b",
2015      "name": "OpenAI: gpt-oss-120b",
2016      "cost_per_1m_in": 0.04,
2017      "cost_per_1m_out": 0.19999999999999998,
2018      "cost_per_1m_in_cached": 0,
2019      "cost_per_1m_out_cached": 0,
2020      "context_window": 131072,
2021      "default_max_tokens": 16384,
2022      "can_reason": true,
2023      "reasoning_levels": [
2024        "low",
2025        "medium",
2026        "high"
2027      ],
2028      "default_reasoning_effort": "medium",
2029      "supports_attachments": false,
2030      "options": {}
2031    },
2032    {
2033      "id": "openai/gpt-oss-120b:exacto",
2034      "name": "OpenAI: gpt-oss-120b (exacto)",
2035      "cost_per_1m_in": 0.04,
2036      "cost_per_1m_out": 0.19999999999999998,
2037      "cost_per_1m_in_cached": 0,
2038      "cost_per_1m_out_cached": 0,
2039      "context_window": 131072,
2040      "default_max_tokens": 16384,
2041      "can_reason": true,
2042      "reasoning_levels": [
2043        "low",
2044        "medium",
2045        "high"
2046      ],
2047      "default_reasoning_effort": "medium",
2048      "supports_attachments": false,
2049      "options": {}
2050    },
2051    {
2052      "id": "openai/gpt-oss-20b",
2053      "name": "OpenAI: gpt-oss-20b",
2054      "cost_per_1m_in": 0.03,
2055      "cost_per_1m_out": 0.14,
2056      "cost_per_1m_in_cached": 0,
2057      "cost_per_1m_out_cached": 0,
2058      "context_window": 131072,
2059      "default_max_tokens": 13107,
2060      "can_reason": true,
2061      "reasoning_levels": [
2062        "low",
2063        "medium",
2064        "high"
2065      ],
2066      "default_reasoning_effort": "medium",
2067      "supports_attachments": false,
2068      "options": {}
2069    },
2070    {
2071      "id": "openai/gpt-oss-20b:free",
2072      "name": "OpenAI: gpt-oss-20b (free)",
2073      "cost_per_1m_in": 0,
2074      "cost_per_1m_out": 0,
2075      "cost_per_1m_in_cached": 0,
2076      "cost_per_1m_out_cached": 0,
2077      "context_window": 131072,
2078      "default_max_tokens": 65536,
2079      "can_reason": true,
2080      "reasoning_levels": [
2081        "low",
2082        "medium",
2083        "high"
2084      ],
2085      "default_reasoning_effort": "medium",
2086      "supports_attachments": false,
2087      "options": {}
2088    },
2089    {
2090      "id": "openai/gpt-oss-safeguard-20b",
2091      "name": "OpenAI: gpt-oss-safeguard-20b",
2092      "cost_per_1m_in": 0.075,
2093      "cost_per_1m_out": 0.3,
2094      "cost_per_1m_in_cached": 0,
2095      "cost_per_1m_out_cached": 0.037,
2096      "context_window": 131072,
2097      "default_max_tokens": 32768,
2098      "can_reason": true,
2099      "reasoning_levels": [
2100        "low",
2101        "medium",
2102        "high"
2103      ],
2104      "default_reasoning_effort": "medium",
2105      "supports_attachments": false,
2106      "options": {}
2107    },
2108    {
2109      "id": "openai/o1",
2110      "name": "OpenAI: o1",
2111      "cost_per_1m_in": 15,
2112      "cost_per_1m_out": 60,
2113      "cost_per_1m_in_cached": 0,
2114      "cost_per_1m_out_cached": 7.5,
2115      "context_window": 200000,
2116      "default_max_tokens": 50000,
2117      "can_reason": false,
2118      "supports_attachments": true,
2119      "options": {}
2120    },
2121    {
2122      "id": "openai/o3",
2123      "name": "OpenAI: o3",
2124      "cost_per_1m_in": 2,
2125      "cost_per_1m_out": 8,
2126      "cost_per_1m_in_cached": 0,
2127      "cost_per_1m_out_cached": 0.5,
2128      "context_window": 200000,
2129      "default_max_tokens": 50000,
2130      "can_reason": true,
2131      "reasoning_levels": [
2132        "low",
2133        "medium",
2134        "high"
2135      ],
2136      "default_reasoning_effort": "medium",
2137      "supports_attachments": true,
2138      "options": {}
2139    },
2140    {
2141      "id": "openai/o3-deep-research",
2142      "name": "OpenAI: o3 Deep Research",
2143      "cost_per_1m_in": 10,
2144      "cost_per_1m_out": 40,
2145      "cost_per_1m_in_cached": 0,
2146      "cost_per_1m_out_cached": 2.5,
2147      "context_window": 200000,
2148      "default_max_tokens": 50000,
2149      "can_reason": true,
2150      "reasoning_levels": [
2151        "low",
2152        "medium",
2153        "high"
2154      ],
2155      "default_reasoning_effort": "medium",
2156      "supports_attachments": true,
2157      "options": {}
2158    },
2159    {
2160      "id": "openai/o3-mini",
2161      "name": "OpenAI: o3 Mini",
2162      "cost_per_1m_in": 1.1,
2163      "cost_per_1m_out": 4.4,
2164      "cost_per_1m_in_cached": 0,
2165      "cost_per_1m_out_cached": 0.55,
2166      "context_window": 200000,
2167      "default_max_tokens": 50000,
2168      "can_reason": false,
2169      "supports_attachments": false,
2170      "options": {}
2171    },
2172    {
2173      "id": "openai/o3-mini-high",
2174      "name": "OpenAI: o3 Mini High",
2175      "cost_per_1m_in": 1.1,
2176      "cost_per_1m_out": 4.4,
2177      "cost_per_1m_in_cached": 0,
2178      "cost_per_1m_out_cached": 0.55,
2179      "context_window": 200000,
2180      "default_max_tokens": 50000,
2181      "can_reason": false,
2182      "supports_attachments": false,
2183      "options": {}
2184    },
2185    {
2186      "id": "openai/o3-pro",
2187      "name": "OpenAI: o3 Pro",
2188      "cost_per_1m_in": 20,
2189      "cost_per_1m_out": 80,
2190      "cost_per_1m_in_cached": 0,
2191      "cost_per_1m_out_cached": 0,
2192      "context_window": 200000,
2193      "default_max_tokens": 50000,
2194      "can_reason": true,
2195      "reasoning_levels": [
2196        "low",
2197        "medium",
2198        "high"
2199      ],
2200      "default_reasoning_effort": "medium",
2201      "supports_attachments": true,
2202      "options": {}
2203    },
2204    {
2205      "id": "openai/o4-mini",
2206      "name": "OpenAI: o4 Mini",
2207      "cost_per_1m_in": 1.1,
2208      "cost_per_1m_out": 4.4,
2209      "cost_per_1m_in_cached": 0,
2210      "cost_per_1m_out_cached": 0.275,
2211      "context_window": 200000,
2212      "default_max_tokens": 50000,
2213      "can_reason": true,
2214      "reasoning_levels": [
2215        "low",
2216        "medium",
2217        "high"
2218      ],
2219      "default_reasoning_effort": "medium",
2220      "supports_attachments": true,
2221      "options": {}
2222    },
2223    {
2224      "id": "openai/o4-mini-deep-research",
2225      "name": "OpenAI: o4 Mini Deep Research",
2226      "cost_per_1m_in": 2,
2227      "cost_per_1m_out": 8,
2228      "cost_per_1m_in_cached": 0,
2229      "cost_per_1m_out_cached": 0.5,
2230      "context_window": 200000,
2231      "default_max_tokens": 50000,
2232      "can_reason": true,
2233      "reasoning_levels": [
2234        "low",
2235        "medium",
2236        "high"
2237      ],
2238      "default_reasoning_effort": "medium",
2239      "supports_attachments": true,
2240      "options": {}
2241    },
2242    {
2243      "id": "openai/o4-mini-high",
2244      "name": "OpenAI: o4 Mini High",
2245      "cost_per_1m_in": 1.1,
2246      "cost_per_1m_out": 4.4,
2247      "cost_per_1m_in_cached": 0,
2248      "cost_per_1m_out_cached": 0.275,
2249      "context_window": 200000,
2250      "default_max_tokens": 50000,
2251      "can_reason": true,
2252      "reasoning_levels": [
2253        "low",
2254        "medium",
2255        "high"
2256      ],
2257      "default_reasoning_effort": "medium",
2258      "supports_attachments": true,
2259      "options": {}
2260    },
2261    {
2262      "id": "qwen/qwen-2.5-72b-instruct",
2263      "name": "Qwen2.5 72B Instruct",
2264      "cost_per_1m_in": 0.07,
2265      "cost_per_1m_out": 0.26,
2266      "cost_per_1m_in_cached": 0,
2267      "cost_per_1m_out_cached": 0,
2268      "context_window": 32768,
2269      "default_max_tokens": 16384,
2270      "can_reason": false,
2271      "supports_attachments": false,
2272      "options": {}
2273    },
2274    {
2275      "id": "qwen/qwen-plus-2025-07-28",
2276      "name": "Qwen: Qwen Plus 0728",
2277      "cost_per_1m_in": 0.39999999999999997,
2278      "cost_per_1m_out": 1.2,
2279      "cost_per_1m_in_cached": 0,
2280      "cost_per_1m_out_cached": 0,
2281      "context_window": 1000000,
2282      "default_max_tokens": 16384,
2283      "can_reason": false,
2284      "supports_attachments": false,
2285      "options": {}
2286    },
2287    {
2288      "id": "qwen/qwen-plus-2025-07-28:thinking",
2289      "name": "Qwen: Qwen Plus 0728 (thinking)",
2290      "cost_per_1m_in": 0.39999999999999997,
2291      "cost_per_1m_out": 4,
2292      "cost_per_1m_in_cached": 0,
2293      "cost_per_1m_out_cached": 0,
2294      "context_window": 1000000,
2295      "default_max_tokens": 16384,
2296      "can_reason": true,
2297      "reasoning_levels": [
2298        "low",
2299        "medium",
2300        "high"
2301      ],
2302      "default_reasoning_effort": "medium",
2303      "supports_attachments": false,
2304      "options": {}
2305    },
2306    {
2307      "id": "qwen/qwen-vl-max",
2308      "name": "Qwen: Qwen VL Max",
2309      "cost_per_1m_in": 0.7999999999999999,
2310      "cost_per_1m_out": 3.1999999999999997,
2311      "cost_per_1m_in_cached": 0,
2312      "cost_per_1m_out_cached": 0,
2313      "context_window": 131072,
2314      "default_max_tokens": 4096,
2315      "can_reason": false,
2316      "supports_attachments": true,
2317      "options": {}
2318    },
2319    {
2320      "id": "qwen/qwen-max",
2321      "name": "Qwen: Qwen-Max ",
2322      "cost_per_1m_in": 1.5999999999999999,
2323      "cost_per_1m_out": 6.3999999999999995,
2324      "cost_per_1m_in_cached": 0,
2325      "cost_per_1m_out_cached": 0.64,
2326      "context_window": 32768,
2327      "default_max_tokens": 4096,
2328      "can_reason": false,
2329      "supports_attachments": false,
2330      "options": {}
2331    },
2332    {
2333      "id": "qwen/qwen-plus",
2334      "name": "Qwen: Qwen-Plus",
2335      "cost_per_1m_in": 0.39999999999999997,
2336      "cost_per_1m_out": 1.2,
2337      "cost_per_1m_in_cached": 0,
2338      "cost_per_1m_out_cached": 0.16,
2339      "context_window": 131072,
2340      "default_max_tokens": 4096,
2341      "can_reason": false,
2342      "supports_attachments": false,
2343      "options": {}
2344    },
2345    {
2346      "id": "qwen/qwen-turbo",
2347      "name": "Qwen: Qwen-Turbo",
2348      "cost_per_1m_in": 0.049999999999999996,
2349      "cost_per_1m_out": 0.19999999999999998,
2350      "cost_per_1m_in_cached": 0,
2351      "cost_per_1m_out_cached": 0.02,
2352      "context_window": 1000000,
2353      "default_max_tokens": 4096,
2354      "can_reason": false,
2355      "supports_attachments": false,
2356      "options": {}
2357    },
2358    {
2359      "id": "qwen/qwen3-14b",
2360      "name": "Qwen: Qwen3 14B",
2361      "cost_per_1m_in": 0.08,
2362      "cost_per_1m_out": 0.24,
2363      "cost_per_1m_in_cached": 0,
2364      "cost_per_1m_out_cached": 0,
2365      "context_window": 40960,
2366      "default_max_tokens": 20480,
2367      "can_reason": true,
2368      "reasoning_levels": [
2369        "low",
2370        "medium",
2371        "high"
2372      ],
2373      "default_reasoning_effort": "medium",
2374      "supports_attachments": false,
2375      "options": {}
2376    },
2377    {
2378      "id": "qwen/qwen3-235b-a22b",
2379      "name": "Qwen: Qwen3 235B A22B",
2380      "cost_per_1m_in": 0.22,
2381      "cost_per_1m_out": 0.88,
2382      "cost_per_1m_in_cached": 0,
2383      "cost_per_1m_out_cached": 0,
2384      "context_window": 131072,
2385      "default_max_tokens": 13107,
2386      "can_reason": true,
2387      "reasoning_levels": [
2388        "low",
2389        "medium",
2390        "high"
2391      ],
2392      "default_reasoning_effort": "medium",
2393      "supports_attachments": false,
2394      "options": {}
2395    },
2396    {
2397      "id": "qwen/qwen3-235b-a22b:free",
2398      "name": "Qwen: Qwen3 235B A22B (free)",
2399      "cost_per_1m_in": 0,
2400      "cost_per_1m_out": 0,
2401      "cost_per_1m_in_cached": 0,
2402      "cost_per_1m_out_cached": 0,
2403      "context_window": 131072,
2404      "default_max_tokens": 13107,
2405      "can_reason": true,
2406      "reasoning_levels": [
2407        "low",
2408        "medium",
2409        "high"
2410      ],
2411      "default_reasoning_effort": "medium",
2412      "supports_attachments": false,
2413      "options": {}
2414    },
2415    {
2416      "id": "qwen/qwen3-235b-a22b-2507",
2417      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2418      "cost_per_1m_in": 0.22,
2419      "cost_per_1m_out": 0.7999999999999999,
2420      "cost_per_1m_in_cached": 0,
2421      "cost_per_1m_out_cached": 0,
2422      "context_window": 262144,
2423      "default_max_tokens": 131072,
2424      "can_reason": true,
2425      "reasoning_levels": [
2426        "low",
2427        "medium",
2428        "high"
2429      ],
2430      "default_reasoning_effort": "medium",
2431      "supports_attachments": false,
2432      "options": {}
2433    },
2434    {
2435      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2436      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2437      "cost_per_1m_in": 0.13,
2438      "cost_per_1m_out": 0.6,
2439      "cost_per_1m_in_cached": 0,
2440      "cost_per_1m_out_cached": 0,
2441      "context_window": 262144,
2442      "default_max_tokens": 131072,
2443      "can_reason": true,
2444      "reasoning_levels": [
2445        "low",
2446        "medium",
2447        "high"
2448      ],
2449      "default_reasoning_effort": "medium",
2450      "supports_attachments": false,
2451      "options": {}
2452    },
2453    {
2454      "id": "qwen/qwen3-30b-a3b",
2455      "name": "Qwen: Qwen3 30B A3B",
2456      "cost_per_1m_in": 0.08,
2457      "cost_per_1m_out": 0.28,
2458      "cost_per_1m_in_cached": 0,
2459      "cost_per_1m_out_cached": 0,
2460      "context_window": 131072,
2461      "default_max_tokens": 65536,
2462      "can_reason": true,
2463      "reasoning_levels": [
2464        "low",
2465        "medium",
2466        "high"
2467      ],
2468      "default_reasoning_effort": "medium",
2469      "supports_attachments": false,
2470      "options": {}
2471    },
2472    {
2473      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2474      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2475      "cost_per_1m_in": 0.08,
2476      "cost_per_1m_out": 0.33,
2477      "cost_per_1m_in_cached": 0,
2478      "cost_per_1m_out_cached": 0,
2479      "context_window": 262144,
2480      "default_max_tokens": 131072,
2481      "can_reason": false,
2482      "supports_attachments": false,
2483      "options": {}
2484    },
2485    {
2486      "id": "qwen/qwen3-32b",
2487      "name": "Qwen: Qwen3 32B",
2488      "cost_per_1m_in": 0.15,
2489      "cost_per_1m_out": 0.5,
2490      "cost_per_1m_in_cached": 0,
2491      "cost_per_1m_out_cached": 0,
2492      "context_window": 131072,
2493      "default_max_tokens": 4000,
2494      "can_reason": true,
2495      "reasoning_levels": [
2496        "low",
2497        "medium",
2498        "high"
2499      ],
2500      "default_reasoning_effort": "medium",
2501      "supports_attachments": false,
2502      "options": {}
2503    },
2504    {
2505      "id": "qwen/qwen3-4b:free",
2506      "name": "Qwen: Qwen3 4B (free)",
2507      "cost_per_1m_in": 0,
2508      "cost_per_1m_out": 0,
2509      "cost_per_1m_in_cached": 0,
2510      "cost_per_1m_out_cached": 0,
2511      "context_window": 40960,
2512      "default_max_tokens": 4096,
2513      "can_reason": true,
2514      "reasoning_levels": [
2515        "low",
2516        "medium",
2517        "high"
2518      ],
2519      "default_reasoning_effort": "medium",
2520      "supports_attachments": false,
2521      "options": {}
2522    },
2523    {
2524      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2525      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2526      "cost_per_1m_in": 0.06,
2527      "cost_per_1m_out": 0.25,
2528      "cost_per_1m_in_cached": 0,
2529      "cost_per_1m_out_cached": 0,
2530      "context_window": 262144,
2531      "default_max_tokens": 131072,
2532      "can_reason": false,
2533      "supports_attachments": false,
2534      "options": {}
2535    },
2536    {
2537      "id": "qwen/qwen3-coder",
2538      "name": "Qwen: Qwen3 Coder 480B A35B",
2539      "cost_per_1m_in": 0.22,
2540      "cost_per_1m_out": 0.95,
2541      "cost_per_1m_in_cached": 0,
2542      "cost_per_1m_out_cached": 0,
2543      "context_window": 262144,
2544      "default_max_tokens": 131072,
2545      "can_reason": false,
2546      "supports_attachments": false,
2547      "options": {}
2548    },
2549    {
2550      "id": "qwen/qwen3-coder:exacto",
2551      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2552      "cost_per_1m_in": 0.38,
2553      "cost_per_1m_out": 1.53,
2554      "cost_per_1m_in_cached": 0,
2555      "cost_per_1m_out_cached": 0,
2556      "context_window": 262144,
2557      "default_max_tokens": 131072,
2558      "can_reason": true,
2559      "reasoning_levels": [
2560        "low",
2561        "medium",
2562        "high"
2563      ],
2564      "default_reasoning_effort": "medium",
2565      "supports_attachments": false,
2566      "options": {}
2567    },
2568    {
2569      "id": "qwen/qwen3-coder:free",
2570      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2571      "cost_per_1m_in": 0,
2572      "cost_per_1m_out": 0,
2573      "cost_per_1m_in_cached": 0,
2574      "cost_per_1m_out_cached": 0,
2575      "context_window": 262000,
2576      "default_max_tokens": 131000,
2577      "can_reason": false,
2578      "supports_attachments": false,
2579      "options": {}
2580    },
2581    {
2582      "id": "qwen/qwen3-coder-flash",
2583      "name": "Qwen: Qwen3 Coder Flash",
2584      "cost_per_1m_in": 0.3,
2585      "cost_per_1m_out": 1.5,
2586      "cost_per_1m_in_cached": 0,
2587      "cost_per_1m_out_cached": 0.08,
2588      "context_window": 128000,
2589      "default_max_tokens": 32768,
2590      "can_reason": false,
2591      "supports_attachments": false,
2592      "options": {}
2593    },
2594    {
2595      "id": "qwen/qwen3-coder-plus",
2596      "name": "Qwen: Qwen3 Coder Plus",
2597      "cost_per_1m_in": 1,
2598      "cost_per_1m_out": 5,
2599      "cost_per_1m_in_cached": 0,
2600      "cost_per_1m_out_cached": 0.09999999999999999,
2601      "context_window": 128000,
2602      "default_max_tokens": 32768,
2603      "can_reason": false,
2604      "supports_attachments": false,
2605      "options": {}
2606    },
2607    {
2608      "id": "qwen/qwen3-max",
2609      "name": "Qwen: Qwen3 Max",
2610      "cost_per_1m_in": 1.2,
2611      "cost_per_1m_out": 6,
2612      "cost_per_1m_in_cached": 0,
2613      "cost_per_1m_out_cached": 0.24,
2614      "context_window": 256000,
2615      "default_max_tokens": 16384,
2616      "can_reason": false,
2617      "supports_attachments": false,
2618      "options": {}
2619    },
2620    {
2621      "id": "qwen/qwen3-next-80b-a3b-instruct",
2622      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2623      "cost_per_1m_in": 0.09999999999999999,
2624      "cost_per_1m_out": 0.7999999999999999,
2625      "cost_per_1m_in_cached": 0,
2626      "cost_per_1m_out_cached": 0,
2627      "context_window": 262144,
2628      "default_max_tokens": 131072,
2629      "can_reason": false,
2630      "supports_attachments": false,
2631      "options": {}
2632    },
2633    {
2634      "id": "qwen/qwen3-next-80b-a3b-thinking",
2635      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2636      "cost_per_1m_in": 0.15,
2637      "cost_per_1m_out": 1.5,
2638      "cost_per_1m_in_cached": 0,
2639      "cost_per_1m_out_cached": 0,
2640      "context_window": 262144,
2641      "default_max_tokens": 26214,
2642      "can_reason": true,
2643      "reasoning_levels": [
2644        "low",
2645        "medium",
2646        "high"
2647      ],
2648      "default_reasoning_effort": "medium",
2649      "supports_attachments": false,
2650      "options": {}
2651    },
2652    {
2653      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2654      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2655      "cost_per_1m_in": 0.39999999999999997,
2656      "cost_per_1m_out": 1.5999999999999999,
2657      "cost_per_1m_in_cached": 0,
2658      "cost_per_1m_out_cached": 0,
2659      "context_window": 131072,
2660      "default_max_tokens": 16384,
2661      "can_reason": false,
2662      "supports_attachments": true,
2663      "options": {}
2664    },
2665    {
2666      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2667      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2668      "cost_per_1m_in": 0.3,
2669      "cost_per_1m_out": 1.2,
2670      "cost_per_1m_in_cached": 0,
2671      "cost_per_1m_out_cached": 0,
2672      "context_window": 262144,
2673      "default_max_tokens": 131072,
2674      "can_reason": true,
2675      "reasoning_levels": [
2676        "low",
2677        "medium",
2678        "high"
2679      ],
2680      "default_reasoning_effort": "medium",
2681      "supports_attachments": true,
2682      "options": {}
2683    },
2684    {
2685      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2686      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2687      "cost_per_1m_in": 0.29,
2688      "cost_per_1m_out": 1,
2689      "cost_per_1m_in_cached": 0,
2690      "cost_per_1m_out_cached": 0,
2691      "context_window": 262144,
2692      "default_max_tokens": 131072,
2693      "can_reason": false,
2694      "supports_attachments": true,
2695      "options": {}
2696    },
2697    {
2698      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2699      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2700      "cost_per_1m_in": 0.16,
2701      "cost_per_1m_out": 0.7999999999999999,
2702      "cost_per_1m_in_cached": 0,
2703      "cost_per_1m_out_cached": 0,
2704      "context_window": 131072,
2705      "default_max_tokens": 16384,
2706      "can_reason": true,
2707      "reasoning_levels": [
2708        "low",
2709        "medium",
2710        "high"
2711      ],
2712      "default_reasoning_effort": "medium",
2713      "supports_attachments": true,
2714      "options": {}
2715    },
2716    {
2717      "id": "qwen/qwen3-vl-8b-instruct",
2718      "name": "Qwen: Qwen3 VL 8B Instruct",
2719      "cost_per_1m_in": 0.12,
2720      "cost_per_1m_out": 0.7,
2721      "cost_per_1m_in_cached": 0,
2722      "cost_per_1m_out_cached": 0,
2723      "context_window": 131072,
2724      "default_max_tokens": 13107,
2725      "can_reason": false,
2726      "supports_attachments": true,
2727      "options": {}
2728    },
2729    {
2730      "id": "qwen/qwen3-vl-8b-thinking",
2731      "name": "Qwen: Qwen3 VL 8B Thinking",
2732      "cost_per_1m_in": 0.18,
2733      "cost_per_1m_out": 2.0999999999999996,
2734      "cost_per_1m_in_cached": 0,
2735      "cost_per_1m_out_cached": 0,
2736      "context_window": 256000,
2737      "default_max_tokens": 16384,
2738      "can_reason": true,
2739      "reasoning_levels": [
2740        "low",
2741        "medium",
2742        "high"
2743      ],
2744      "default_reasoning_effort": "medium",
2745      "supports_attachments": true,
2746      "options": {}
2747    },
2748    {
2749      "id": "stepfun-ai/step3",
2750      "name": "StepFun: Step3",
2751      "cost_per_1m_in": 0.5700000000000001,
2752      "cost_per_1m_out": 1.42,
2753      "cost_per_1m_in_cached": 0,
2754      "cost_per_1m_out_cached": 0,
2755      "context_window": 65536,
2756      "default_max_tokens": 32768,
2757      "can_reason": true,
2758      "reasoning_levels": [
2759        "low",
2760        "medium",
2761        "high"
2762      ],
2763      "default_reasoning_effort": "medium",
2764      "supports_attachments": true,
2765      "options": {}
2766    },
2767    {
2768      "id": "tngtech/deepseek-r1t2-chimera",
2769      "name": "TNG: DeepSeek R1T2 Chimera",
2770      "cost_per_1m_in": 0.3,
2771      "cost_per_1m_out": 1.2,
2772      "cost_per_1m_in_cached": 0,
2773      "cost_per_1m_out_cached": 0,
2774      "context_window": 163840,
2775      "default_max_tokens": 81920,
2776      "can_reason": true,
2777      "reasoning_levels": [
2778        "low",
2779        "medium",
2780        "high"
2781      ],
2782      "default_reasoning_effort": "medium",
2783      "supports_attachments": false,
2784      "options": {}
2785    },
2786    {
2787      "id": "tngtech/tng-r1t-chimera",
2788      "name": "TNG: R1T Chimera",
2789      "cost_per_1m_in": 0.3,
2790      "cost_per_1m_out": 1.2,
2791      "cost_per_1m_in_cached": 0,
2792      "cost_per_1m_out_cached": 0,
2793      "context_window": 163840,
2794      "default_max_tokens": 81920,
2795      "can_reason": true,
2796      "reasoning_levels": [
2797        "low",
2798        "medium",
2799        "high"
2800      ],
2801      "default_reasoning_effort": "medium",
2802      "supports_attachments": false,
2803      "options": {}
2804    },
2805    {
2806      "id": "tngtech/tng-r1t-chimera:free",
2807      "name": "TNG: R1T Chimera (free)",
2808      "cost_per_1m_in": 0,
2809      "cost_per_1m_out": 0,
2810      "cost_per_1m_in_cached": 0,
2811      "cost_per_1m_out_cached": 0,
2812      "context_window": 163840,
2813      "default_max_tokens": 81920,
2814      "can_reason": true,
2815      "reasoning_levels": [
2816        "low",
2817        "medium",
2818        "high"
2819      ],
2820      "default_reasoning_effort": "medium",
2821      "supports_attachments": false,
2822      "options": {}
2823    },
2824    {
2825      "id": "thedrummer/rocinante-12b",
2826      "name": "TheDrummer: Rocinante 12B",
2827      "cost_per_1m_in": 0.16999999999999998,
2828      "cost_per_1m_out": 0.43,
2829      "cost_per_1m_in_cached": 0,
2830      "cost_per_1m_out_cached": 0,
2831      "context_window": 32768,
2832      "default_max_tokens": 3276,
2833      "can_reason": false,
2834      "supports_attachments": false,
2835      "options": {}
2836    },
2837    {
2838      "id": "thedrummer/unslopnemo-12b",
2839      "name": "TheDrummer: UnslopNemo 12B",
2840      "cost_per_1m_in": 0.39999999999999997,
2841      "cost_per_1m_out": 0.39999999999999997,
2842      "cost_per_1m_in_cached": 0,
2843      "cost_per_1m_out_cached": 0,
2844      "context_window": 32768,
2845      "default_max_tokens": 3276,
2846      "can_reason": false,
2847      "supports_attachments": false,
2848      "options": {}
2849    },
2850    {
2851      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2852      "name": "Tongyi DeepResearch 30B A3B",
2853      "cost_per_1m_in": 0.09,
2854      "cost_per_1m_out": 0.39999999999999997,
2855      "cost_per_1m_in_cached": 0,
2856      "cost_per_1m_out_cached": 0,
2857      "context_window": 131072,
2858      "default_max_tokens": 65536,
2859      "can_reason": true,
2860      "reasoning_levels": [
2861        "low",
2862        "medium",
2863        "high"
2864      ],
2865      "default_reasoning_effort": "medium",
2866      "supports_attachments": false,
2867      "options": {}
2868    },
2869    {
2870      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2871      "name": "Tongyi DeepResearch 30B A3B (free)",
2872      "cost_per_1m_in": 0,
2873      "cost_per_1m_out": 0,
2874      "cost_per_1m_in_cached": 0,
2875      "cost_per_1m_out_cached": 0,
2876      "context_window": 131072,
2877      "default_max_tokens": 65536,
2878      "can_reason": true,
2879      "reasoning_levels": [
2880        "low",
2881        "medium",
2882        "high"
2883      ],
2884      "default_reasoning_effort": "medium",
2885      "supports_attachments": false,
2886      "options": {}
2887    },
2888    {
2889      "id": "z-ai/glm-4-32b",
2890      "name": "Z.AI: GLM 4 32B ",
2891      "cost_per_1m_in": 0.09999999999999999,
2892      "cost_per_1m_out": 0.09999999999999999,
2893      "cost_per_1m_in_cached": 0,
2894      "cost_per_1m_out_cached": 0,
2895      "context_window": 128000,
2896      "default_max_tokens": 12800,
2897      "can_reason": false,
2898      "supports_attachments": false,
2899      "options": {}
2900    },
2901    {
2902      "id": "z-ai/glm-4.5",
2903      "name": "Z.AI: GLM 4.5",
2904      "cost_per_1m_in": 0.48,
2905      "cost_per_1m_out": 1.76,
2906      "cost_per_1m_in_cached": 0,
2907      "cost_per_1m_out_cached": 0.11,
2908      "context_window": 131072,
2909      "default_max_tokens": 49152,
2910      "can_reason": true,
2911      "reasoning_levels": [
2912        "low",
2913        "medium",
2914        "high"
2915      ],
2916      "default_reasoning_effort": "medium",
2917      "supports_attachments": false,
2918      "options": {}
2919    },
2920    {
2921      "id": "z-ai/glm-4.5-air",
2922      "name": "Z.AI: GLM 4.5 Air",
2923      "cost_per_1m_in": 0.14,
2924      "cost_per_1m_out": 0.86,
2925      "cost_per_1m_in_cached": 0,
2926      "cost_per_1m_out_cached": 0,
2927      "context_window": 131072,
2928      "default_max_tokens": 65536,
2929      "can_reason": true,
2930      "reasoning_levels": [
2931        "low",
2932        "medium",
2933        "high"
2934      ],
2935      "default_reasoning_effort": "medium",
2936      "supports_attachments": false,
2937      "options": {}
2938    },
2939    {
2940      "id": "z-ai/glm-4.5-air:free",
2941      "name": "Z.AI: GLM 4.5 Air (free)",
2942      "cost_per_1m_in": 0,
2943      "cost_per_1m_out": 0,
2944      "cost_per_1m_in_cached": 0,
2945      "cost_per_1m_out_cached": 0,
2946      "context_window": 131072,
2947      "default_max_tokens": 48000,
2948      "can_reason": true,
2949      "reasoning_levels": [
2950        "low",
2951        "medium",
2952        "high"
2953      ],
2954      "default_reasoning_effort": "medium",
2955      "supports_attachments": false,
2956      "options": {}
2957    },
2958    {
2959      "id": "z-ai/glm-4.5v",
2960      "name": "Z.AI: GLM 4.5V",
2961      "cost_per_1m_in": 0.48,
2962      "cost_per_1m_out": 1.44,
2963      "cost_per_1m_in_cached": 0,
2964      "cost_per_1m_out_cached": 0.11,
2965      "context_window": 65536,
2966      "default_max_tokens": 8192,
2967      "can_reason": true,
2968      "reasoning_levels": [
2969        "low",
2970        "medium",
2971        "high"
2972      ],
2973      "default_reasoning_effort": "medium",
2974      "supports_attachments": true,
2975      "options": {}
2976    },
2977    {
2978      "id": "z-ai/glm-4.6",
2979      "name": "Z.AI: GLM 4.6",
2980      "cost_per_1m_in": 0.5,
2981      "cost_per_1m_out": 1.9,
2982      "cost_per_1m_in_cached": 0,
2983      "cost_per_1m_out_cached": 0,
2984      "context_window": 204800,
2985      "default_max_tokens": 102400,
2986      "can_reason": true,
2987      "reasoning_levels": [
2988        "low",
2989        "medium",
2990        "high"
2991      ],
2992      "default_reasoning_effort": "medium",
2993      "supports_attachments": false,
2994      "options": {}
2995    },
2996    {
2997      "id": "z-ai/glm-4.6:exacto",
2998      "name": "Z.AI: GLM 4.6 (exacto)",
2999      "cost_per_1m_in": 0.48,
3000      "cost_per_1m_out": 1.76,
3001      "cost_per_1m_in_cached": 0,
3002      "cost_per_1m_out_cached": 0,
3003      "context_window": 204800,
3004      "default_max_tokens": 65536,
3005      "can_reason": true,
3006      "reasoning_levels": [
3007        "low",
3008        "medium",
3009        "high"
3010      ],
3011      "default_reasoning_effort": "medium",
3012      "supports_attachments": false,
3013      "options": {}
3014    },
3015    {
3016      "id": "x-ai/grok-3",
3017      "name": "xAI: Grok 3",
3018      "cost_per_1m_in": 3,
3019      "cost_per_1m_out": 15,
3020      "cost_per_1m_in_cached": 0,
3021      "cost_per_1m_out_cached": 0.75,
3022      "context_window": 131072,
3023      "default_max_tokens": 13107,
3024      "can_reason": false,
3025      "supports_attachments": false,
3026      "options": {}
3027    },
3028    {
3029      "id": "x-ai/grok-3-beta",
3030      "name": "xAI: Grok 3 Beta",
3031      "cost_per_1m_in": 3,
3032      "cost_per_1m_out": 15,
3033      "cost_per_1m_in_cached": 0,
3034      "cost_per_1m_out_cached": 0.75,
3035      "context_window": 131072,
3036      "default_max_tokens": 13107,
3037      "can_reason": false,
3038      "supports_attachments": false,
3039      "options": {}
3040    },
3041    {
3042      "id": "x-ai/grok-3-mini",
3043      "name": "xAI: Grok 3 Mini",
3044      "cost_per_1m_in": 0.3,
3045      "cost_per_1m_out": 0.5,
3046      "cost_per_1m_in_cached": 0,
3047      "cost_per_1m_out_cached": 0.075,
3048      "context_window": 131072,
3049      "default_max_tokens": 13107,
3050      "can_reason": true,
3051      "reasoning_levels": [
3052        "low",
3053        "medium",
3054        "high"
3055      ],
3056      "default_reasoning_effort": "medium",
3057      "supports_attachments": false,
3058      "options": {}
3059    },
3060    {
3061      "id": "x-ai/grok-3-mini-beta",
3062      "name": "xAI: Grok 3 Mini Beta",
3063      "cost_per_1m_in": 0.3,
3064      "cost_per_1m_out": 0.5,
3065      "cost_per_1m_in_cached": 0,
3066      "cost_per_1m_out_cached": 0.075,
3067      "context_window": 131072,
3068      "default_max_tokens": 13107,
3069      "can_reason": true,
3070      "reasoning_levels": [
3071        "low",
3072        "medium",
3073        "high"
3074      ],
3075      "default_reasoning_effort": "medium",
3076      "supports_attachments": false,
3077      "options": {}
3078    },
3079    {
3080      "id": "x-ai/grok-4",
3081      "name": "xAI: Grok 4",
3082      "cost_per_1m_in": 3,
3083      "cost_per_1m_out": 15,
3084      "cost_per_1m_in_cached": 0,
3085      "cost_per_1m_out_cached": 0.75,
3086      "context_window": 256000,
3087      "default_max_tokens": 25600,
3088      "can_reason": true,
3089      "reasoning_levels": [
3090        "low",
3091        "medium",
3092        "high"
3093      ],
3094      "default_reasoning_effort": "medium",
3095      "supports_attachments": true,
3096      "options": {}
3097    },
3098    {
3099      "id": "x-ai/grok-4-fast",
3100      "name": "xAI: Grok 4 Fast",
3101      "cost_per_1m_in": 0.19999999999999998,
3102      "cost_per_1m_out": 0.5,
3103      "cost_per_1m_in_cached": 0,
3104      "cost_per_1m_out_cached": 0.049999999999999996,
3105      "context_window": 2000000,
3106      "default_max_tokens": 15000,
3107      "can_reason": true,
3108      "reasoning_levels": [
3109        "low",
3110        "medium",
3111        "high"
3112      ],
3113      "default_reasoning_effort": "medium",
3114      "supports_attachments": true,
3115      "options": {}
3116    },
3117    {
3118      "id": "x-ai/grok-4.1-fast:free",
3119      "name": "xAI: Grok 4.1 Fast (free)",
3120      "cost_per_1m_in": 0,
3121      "cost_per_1m_out": 0,
3122      "cost_per_1m_in_cached": 0,
3123      "cost_per_1m_out_cached": 0,
3124      "context_window": 2000000,
3125      "default_max_tokens": 15000,
3126      "can_reason": true,
3127      "reasoning_levels": [
3128        "low",
3129        "medium",
3130        "high"
3131      ],
3132      "default_reasoning_effort": "medium",
3133      "supports_attachments": true,
3134      "options": {}
3135    },
3136    {
3137      "id": "x-ai/grok-code-fast-1",
3138      "name": "xAI: Grok Code Fast 1",
3139      "cost_per_1m_in": 0.19999999999999998,
3140      "cost_per_1m_out": 1.5,
3141      "cost_per_1m_in_cached": 0,
3142      "cost_per_1m_out_cached": 0.02,
3143      "context_window": 256000,
3144      "default_max_tokens": 5000,
3145      "can_reason": true,
3146      "reasoning_levels": [
3147        "low",
3148        "medium",
3149        "high"
3150      ],
3151      "default_reasoning_effort": "medium",
3152      "supports_attachments": false,
3153      "options": {}
3154    }
3155  ],
3156  "default_headers": {
3157    "HTTP-Referer": "https://charm.land",
3158    "X-Title": "Crush"
3159  }
3160}