openrouter.json

   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false,
  21      "options": {}
  22    },
  23    {
  24      "id": "ai21/jamba-mini-1.7",
  25      "name": "AI21: Jamba Mini 1.7",
  26      "cost_per_1m_in": 0.2,
  27      "cost_per_1m_out": 0.4,
  28      "cost_per_1m_in_cached": 0,
  29      "cost_per_1m_out_cached": 0,
  30      "context_window": 256000,
  31      "default_max_tokens": 2048,
  32      "can_reason": false,
  33      "supports_attachments": false,
  34      "options": {}
  35    },
  36    {
  37      "id": "amazon/nova-lite-v1",
  38      "name": "Amazon: Nova Lite 1.0",
  39      "cost_per_1m_in": 0.06,
  40      "cost_per_1m_out": 0.24,
  41      "cost_per_1m_in_cached": 0,
  42      "cost_per_1m_out_cached": 0,
  43      "context_window": 300000,
  44      "default_max_tokens": 2560,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "amazon/nova-micro-v1",
  51      "name": "Amazon: Nova Micro 1.0",
  52      "cost_per_1m_in": 0.035,
  53      "cost_per_1m_out": 0.14,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 128000,
  57      "default_max_tokens": 2560,
  58      "can_reason": false,
  59      "supports_attachments": false,
  60      "options": {}
  61    },
  62    {
  63      "id": "amazon/nova-premier-v1",
  64      "name": "Amazon: Nova Premier 1.0",
  65      "cost_per_1m_in": 2.5,
  66      "cost_per_1m_out": 12.5,
  67      "cost_per_1m_in_cached": 0,
  68      "cost_per_1m_out_cached": 0.625,
  69      "context_window": 1000000,
  70      "default_max_tokens": 16000,
  71      "can_reason": false,
  72      "supports_attachments": true,
  73      "options": {}
  74    },
  75    {
  76      "id": "amazon/nova-pro-v1",
  77      "name": "Amazon: Nova Pro 1.0",
  78      "cost_per_1m_in": 0.8,
  79      "cost_per_1m_out": 3.2,
  80      "cost_per_1m_in_cached": 0,
  81      "cost_per_1m_out_cached": 0,
  82      "context_window": 300000,
  83      "default_max_tokens": 2560,
  84      "can_reason": false,
  85      "supports_attachments": true,
  86      "options": {}
  87    },
  88    {
  89      "id": "anthropic/claude-3-haiku",
  90      "name": "Anthropic: Claude 3 Haiku",
  91      "cost_per_1m_in": 0.25,
  92      "cost_per_1m_out": 1.25,
  93      "cost_per_1m_in_cached": 0.3,
  94      "cost_per_1m_out_cached": 0.03,
  95      "context_window": 200000,
  96      "default_max_tokens": 2048,
  97      "can_reason": false,
  98      "supports_attachments": true,
  99      "options": {}
 100    },
 101    {
 102      "id": "anthropic/claude-3-opus",
 103      "name": "Anthropic: Claude 3 Opus",
 104      "cost_per_1m_in": 15,
 105      "cost_per_1m_out": 75,
 106      "cost_per_1m_in_cached": 18.75,
 107      "cost_per_1m_out_cached": 1.5,
 108      "context_window": 200000,
 109      "default_max_tokens": 2048,
 110      "can_reason": false,
 111      "supports_attachments": true,
 112      "options": {}
 113    },
 114    {
 115      "id": "anthropic/claude-3.5-haiku",
 116      "name": "Anthropic: Claude 3.5 Haiku",
 117      "cost_per_1m_in": 0.8,
 118      "cost_per_1m_out": 4,
 119      "cost_per_1m_in_cached": 1,
 120      "cost_per_1m_out_cached": 0.08,
 121      "context_window": 200000,
 122      "default_max_tokens": 4096,
 123      "can_reason": false,
 124      "supports_attachments": true,
 125      "options": {}
 126    },
 127    {
 128      "id": "anthropic/claude-3.5-haiku-20241022",
 129      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
 130      "cost_per_1m_in": 0.8,
 131      "cost_per_1m_out": 4,
 132      "cost_per_1m_in_cached": 1,
 133      "cost_per_1m_out_cached": 0.08,
 134      "context_window": 200000,
 135      "default_max_tokens": 4096,
 136      "can_reason": false,
 137      "supports_attachments": true,
 138      "options": {}
 139    },
 140    {
 141      "id": "anthropic/claude-3.5-sonnet",
 142      "name": "Anthropic: Claude 3.5 Sonnet",
 143      "cost_per_1m_in": 3,
 144      "cost_per_1m_out": 15,
 145      "cost_per_1m_in_cached": 3.75,
 146      "cost_per_1m_out_cached": 0.3,
 147      "context_window": 200000,
 148      "default_max_tokens": 4096,
 149      "can_reason": false,
 150      "supports_attachments": true,
 151      "options": {}
 152    },
 153    {
 154      "id": "anthropic/claude-3.7-sonnet",
 155      "name": "Anthropic: Claude 3.7 Sonnet",
 156      "cost_per_1m_in": 3,
 157      "cost_per_1m_out": 15,
 158      "cost_per_1m_in_cached": 3.75,
 159      "cost_per_1m_out_cached": 0.3,
 160      "context_window": 200000,
 161      "default_max_tokens": 64000,
 162      "can_reason": true,
 163      "reasoning_levels": [
 164        "low",
 165        "medium",
 166        "high"
 167      ],
 168      "default_reasoning_effort": "medium",
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-3.7-sonnet:thinking",
 174      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 175      "cost_per_1m_in": 3,
 176      "cost_per_1m_out": 15,
 177      "cost_per_1m_in_cached": 3.75,
 178      "cost_per_1m_out_cached": 0.3,
 179      "context_window": 200000,
 180      "default_max_tokens": 32000,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "low",
 184        "medium",
 185        "high"
 186      ],
 187      "default_reasoning_effort": "medium",
 188      "supports_attachments": true,
 189      "options": {}
 190    },
 191    {
 192      "id": "anthropic/claude-haiku-4.5",
 193      "name": "Anthropic: Claude Haiku 4.5",
 194      "cost_per_1m_in": 1,
 195      "cost_per_1m_out": 5,
 196      "cost_per_1m_in_cached": 1.25,
 197      "cost_per_1m_out_cached": 0.1,
 198      "context_window": 200000,
 199      "default_max_tokens": 32000,
 200      "can_reason": true,
 201      "reasoning_levels": [
 202        "low",
 203        "medium",
 204        "high"
 205      ],
 206      "default_reasoning_effort": "medium",
 207      "supports_attachments": true,
 208      "options": {}
 209    },
 210    {
 211      "id": "anthropic/claude-opus-4",
 212      "name": "Anthropic: Claude Opus 4",
 213      "cost_per_1m_in": 15,
 214      "cost_per_1m_out": 75,
 215      "cost_per_1m_in_cached": 18.75,
 216      "cost_per_1m_out_cached": 1.5,
 217      "context_window": 200000,
 218      "default_max_tokens": 16000,
 219      "can_reason": true,
 220      "reasoning_levels": [
 221        "low",
 222        "medium",
 223        "high"
 224      ],
 225      "default_reasoning_effort": "medium",
 226      "supports_attachments": true,
 227      "options": {}
 228    },
 229    {
 230      "id": "anthropic/claude-opus-4.1",
 231      "name": "Anthropic: Claude Opus 4.1",
 232      "cost_per_1m_in": 15,
 233      "cost_per_1m_out": 75,
 234      "cost_per_1m_in_cached": 18.75,
 235      "cost_per_1m_out_cached": 1.5,
 236      "context_window": 200000,
 237      "default_max_tokens": 16000,
 238      "can_reason": true,
 239      "reasoning_levels": [
 240        "low",
 241        "medium",
 242        "high"
 243      ],
 244      "default_reasoning_effort": "medium",
 245      "supports_attachments": true,
 246      "options": {}
 247    },
 248    {
 249      "id": "anthropic/claude-sonnet-4",
 250      "name": "Anthropic: Claude Sonnet 4",
 251      "cost_per_1m_in": 3,
 252      "cost_per_1m_out": 15,
 253      "cost_per_1m_in_cached": 3.75,
 254      "cost_per_1m_out_cached": 0.3,
 255      "context_window": 1000000,
 256      "default_max_tokens": 32000,
 257      "can_reason": true,
 258      "reasoning_levels": [
 259        "low",
 260        "medium",
 261        "high"
 262      ],
 263      "default_reasoning_effort": "medium",
 264      "supports_attachments": true,
 265      "options": {}
 266    },
 267    {
 268      "id": "anthropic/claude-sonnet-4.5",
 269      "name": "Anthropic: Claude Sonnet 4.5",
 270      "cost_per_1m_in": 3,
 271      "cost_per_1m_out": 15,
 272      "cost_per_1m_in_cached": 3.75,
 273      "cost_per_1m_out_cached": 0.3,
 274      "context_window": 1000000,
 275      "default_max_tokens": 32000,
 276      "can_reason": true,
 277      "reasoning_levels": [
 278        "low",
 279        "medium",
 280        "high"
 281      ],
 282      "default_reasoning_effort": "medium",
 283      "supports_attachments": true,
 284      "options": {}
 285    },
 286    {
 287      "id": "arcee-ai/virtuoso-large",
 288      "name": "Arcee AI: Virtuoso Large",
 289      "cost_per_1m_in": 0.75,
 290      "cost_per_1m_out": 1.2,
 291      "cost_per_1m_in_cached": 0,
 292      "cost_per_1m_out_cached": 0,
 293      "context_window": 131072,
 294      "default_max_tokens": 32000,
 295      "can_reason": false,
 296      "supports_attachments": false,
 297      "options": {}
 298    },
 299    {
 300      "id": "baidu/ernie-4.5-21b-a3b",
 301      "name": "Baidu: ERNIE 4.5 21B A3B",
 302      "cost_per_1m_in": 0.07,
 303      "cost_per_1m_out": 0.28,
 304      "cost_per_1m_in_cached": 0,
 305      "cost_per_1m_out_cached": 0,
 306      "context_window": 120000,
 307      "default_max_tokens": 4000,
 308      "can_reason": false,
 309      "supports_attachments": false,
 310      "options": {}
 311    },
 312    {
 313      "id": "baidu/ernie-4.5-vl-28b-a3b",
 314      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 315      "cost_per_1m_in": 0.14,
 316      "cost_per_1m_out": 0.56,
 317      "cost_per_1m_in_cached": 0,
 318      "cost_per_1m_out_cached": 0,
 319      "context_window": 30000,
 320      "default_max_tokens": 4000,
 321      "can_reason": true,
 322      "reasoning_levels": [
 323        "low",
 324        "medium",
 325        "high"
 326      ],
 327      "default_reasoning_effort": "medium",
 328      "supports_attachments": true,
 329      "options": {}
 330    },
 331    {
 332      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
 333      "name": "Cogito V2 Preview Llama 109B",
 334      "cost_per_1m_in": 0.18,
 335      "cost_per_1m_out": 0.59,
 336      "cost_per_1m_in_cached": 0,
 337      "cost_per_1m_out_cached": 0,
 338      "context_window": 32767,
 339      "default_max_tokens": 3276,
 340      "can_reason": true,
 341      "reasoning_levels": [
 342        "low",
 343        "medium",
 344        "high"
 345      ],
 346      "default_reasoning_effort": "medium",
 347      "supports_attachments": true,
 348      "options": {}
 349    },
 350    {
 351      "id": "cohere/command-r-08-2024",
 352      "name": "Cohere: Command R (08-2024)",
 353      "cost_per_1m_in": 0.15,
 354      "cost_per_1m_out": 0.6,
 355      "cost_per_1m_in_cached": 0,
 356      "cost_per_1m_out_cached": 0,
 357      "context_window": 128000,
 358      "default_max_tokens": 2000,
 359      "can_reason": false,
 360      "supports_attachments": false,
 361      "options": {}
 362    },
 363    {
 364      "id": "cohere/command-r-plus-08-2024",
 365      "name": "Cohere: Command R+ (08-2024)",
 366      "cost_per_1m_in": 2.5,
 367      "cost_per_1m_out": 10,
 368      "cost_per_1m_in_cached": 0,
 369      "cost_per_1m_out_cached": 0,
 370      "context_window": 128000,
 371      "default_max_tokens": 2000,
 372      "can_reason": false,
 373      "supports_attachments": false,
 374      "options": {}
 375    },
 376    {
 377      "id": "deepcogito/cogito-v2-preview-llama-405b",
 378      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
 379      "cost_per_1m_in": 3.5,
 380      "cost_per_1m_out": 3.5,
 381      "cost_per_1m_in_cached": 0,
 382      "cost_per_1m_out_cached": 0,
 383      "context_window": 32768,
 384      "default_max_tokens": 3276,
 385      "can_reason": true,
 386      "reasoning_levels": [
 387        "low",
 388        "medium",
 389        "high"
 390      ],
 391      "default_reasoning_effort": "medium",
 392      "supports_attachments": false,
 393      "options": {}
 394    },
 395    {
 396      "id": "deepcogito/cogito-v2-preview-llama-70b",
 397      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
 398      "cost_per_1m_in": 0.88,
 399      "cost_per_1m_out": 0.88,
 400      "cost_per_1m_in_cached": 0,
 401      "cost_per_1m_out_cached": 0,
 402      "context_window": 32768,
 403      "default_max_tokens": 3276,
 404      "can_reason": true,
 405      "reasoning_levels": [
 406        "low",
 407        "medium",
 408        "high"
 409      ],
 410      "default_reasoning_effort": "medium",
 411      "supports_attachments": false,
 412      "options": {}
 413    },
 414    {
 415      "id": "deepseek/deepseek-chat",
 416      "name": "DeepSeek: DeepSeek V3",
 417      "cost_per_1m_in": 0.4,
 418      "cost_per_1m_out": 1.3,
 419      "cost_per_1m_in_cached": 0,
 420      "cost_per_1m_out_cached": 0,
 421      "context_window": 64000,
 422      "default_max_tokens": 8000,
 423      "can_reason": false,
 424      "supports_attachments": false,
 425      "options": {}
 426    },
 427    {
 428      "id": "deepseek/deepseek-chat-v3-0324",
 429      "name": "DeepSeek: DeepSeek V3 0324",
 430      "cost_per_1m_in": 0.27,
 431      "cost_per_1m_out": 1.12,
 432      "cost_per_1m_in_cached": 0,
 433      "cost_per_1m_out_cached": 0.135,
 434      "context_window": 163840,
 435      "default_max_tokens": 81920,
 436      "can_reason": false,
 437      "supports_attachments": false,
 438      "options": {}
 439    },
 440    {
 441      "id": "deepseek/deepseek-chat-v3-0324:free",
 442      "name": "DeepSeek: DeepSeek V3 0324 (free)",
 443      "cost_per_1m_in": 0,
 444      "cost_per_1m_out": 0,
 445      "cost_per_1m_in_cached": 0,
 446      "cost_per_1m_out_cached": 0,
 447      "context_window": 163840,
 448      "default_max_tokens": 16384,
 449      "can_reason": false,
 450      "supports_attachments": false,
 451      "options": {}
 452    },
 453    {
 454      "id": "deepseek/deepseek-chat-v3.1",
 455      "name": "DeepSeek: DeepSeek V3.1",
 456      "cost_per_1m_in": 0.56,
 457      "cost_per_1m_out": 1.68,
 458      "cost_per_1m_in_cached": 0,
 459      "cost_per_1m_out_cached": 0,
 460      "context_window": 163840,
 461      "default_max_tokens": 16384,
 462      "can_reason": true,
 463      "reasoning_levels": [
 464        "low",
 465        "medium",
 466        "high"
 467      ],
 468      "default_reasoning_effort": "medium",
 469      "supports_attachments": false,
 470      "options": {}
 471    },
 472    {
 473      "id": "deepseek/deepseek-v3.1-terminus",
 474      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 475      "cost_per_1m_in": 0.27,
 476      "cost_per_1m_out": 1,
 477      "cost_per_1m_in_cached": 0,
 478      "cost_per_1m_out_cached": 0,
 479      "context_window": 163840,
 480      "default_max_tokens": 16384,
 481      "can_reason": true,
 482      "reasoning_levels": [
 483        "low",
 484        "medium",
 485        "high"
 486      ],
 487      "default_reasoning_effort": "medium",
 488      "supports_attachments": false,
 489      "options": {}
 490    },
 491    {
 492      "id": "deepseek/deepseek-v3.1-terminus:exacto",
 493      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
 494      "cost_per_1m_in": 0.27,
 495      "cost_per_1m_out": 1,
 496      "cost_per_1m_in_cached": 0,
 497      "cost_per_1m_out_cached": 0,
 498      "context_window": 163840,
 499      "default_max_tokens": 16384,
 500      "can_reason": true,
 501      "reasoning_levels": [
 502        "low",
 503        "medium",
 504        "high"
 505      ],
 506      "default_reasoning_effort": "medium",
 507      "supports_attachments": false,
 508      "options": {}
 509    },
 510    {
 511      "id": "deepseek/deepseek-v3.2-exp",
 512      "name": "DeepSeek: DeepSeek V3.2 Exp",
 513      "cost_per_1m_in": 0.27,
 514      "cost_per_1m_out": 0.41,
 515      "cost_per_1m_in_cached": 0,
 516      "cost_per_1m_out_cached": 0,
 517      "context_window": 163840,
 518      "default_max_tokens": 32768,
 519      "can_reason": true,
 520      "reasoning_levels": [
 521        "low",
 522        "medium",
 523        "high"
 524      ],
 525      "default_reasoning_effort": "medium",
 526      "supports_attachments": false,
 527      "options": {}
 528    },
 529    {
 530      "id": "deepseek/deepseek-r1",
 531      "name": "DeepSeek: R1",
 532      "cost_per_1m_in": 0.7,
 533      "cost_per_1m_out": 2.4,
 534      "cost_per_1m_in_cached": 0,
 535      "cost_per_1m_out_cached": 0,
 536      "context_window": 163840,
 537      "default_max_tokens": 81920,
 538      "can_reason": true,
 539      "reasoning_levels": [
 540        "low",
 541        "medium",
 542        "high"
 543      ],
 544      "default_reasoning_effort": "medium",
 545      "supports_attachments": false,
 546      "options": {}
 547    },
 548    {
 549      "id": "deepseek/deepseek-r1-0528",
 550      "name": "DeepSeek: R1 0528",
 551      "cost_per_1m_in": 0.4,
 552      "cost_per_1m_out": 1.75,
 553      "cost_per_1m_in_cached": 0,
 554      "cost_per_1m_out_cached": 0,
 555      "context_window": 163840,
 556      "default_max_tokens": 81920,
 557      "can_reason": true,
 558      "reasoning_levels": [
 559        "low",
 560        "medium",
 561        "high"
 562      ],
 563      "default_reasoning_effort": "medium",
 564      "supports_attachments": false,
 565      "options": {}
 566    },
 567    {
 568      "id": "google/gemini-2.0-flash-001",
 569      "name": "Google: Gemini 2.0 Flash",
 570      "cost_per_1m_in": 0.1,
 571      "cost_per_1m_out": 0.4,
 572      "cost_per_1m_in_cached": 0.1833,
 573      "cost_per_1m_out_cached": 0.025,
 574      "context_window": 1048576,
 575      "default_max_tokens": 4096,
 576      "can_reason": false,
 577      "supports_attachments": true,
 578      "options": {}
 579    },
 580    {
 581      "id": "google/gemini-2.0-flash-exp:free",
 582      "name": "Google: Gemini 2.0 Flash Experimental (free)",
 583      "cost_per_1m_in": 0,
 584      "cost_per_1m_out": 0,
 585      "cost_per_1m_in_cached": 0,
 586      "cost_per_1m_out_cached": 0,
 587      "context_window": 1048576,
 588      "default_max_tokens": 4096,
 589      "can_reason": false,
 590      "supports_attachments": true,
 591      "options": {}
 592    },
 593    {
 594      "id": "google/gemini-2.0-flash-lite-001",
 595      "name": "Google: Gemini 2.0 Flash Lite",
 596      "cost_per_1m_in": 0.075,
 597      "cost_per_1m_out": 0.3,
 598      "cost_per_1m_in_cached": 0,
 599      "cost_per_1m_out_cached": 0,
 600      "context_window": 1048576,
 601      "default_max_tokens": 4096,
 602      "can_reason": false,
 603      "supports_attachments": true,
 604      "options": {}
 605    },
 606    {
 607      "id": "google/gemini-2.5-flash",
 608      "name": "Google: Gemini 2.5 Flash",
 609      "cost_per_1m_in": 0.3,
 610      "cost_per_1m_out": 2.5,
 611      "cost_per_1m_in_cached": 0.3833,
 612      "cost_per_1m_out_cached": 0.03,
 613      "context_window": 1048576,
 614      "default_max_tokens": 32767,
 615      "can_reason": true,
 616      "reasoning_levels": [
 617        "low",
 618        "medium",
 619        "high"
 620      ],
 621      "default_reasoning_effort": "medium",
 622      "supports_attachments": true,
 623      "options": {}
 624    },
 625    {
 626      "id": "google/gemini-2.5-flash-lite",
 627      "name": "Google: Gemini 2.5 Flash Lite",
 628      "cost_per_1m_in": 0.1,
 629      "cost_per_1m_out": 0.4,
 630      "cost_per_1m_in_cached": 0.1833,
 631      "cost_per_1m_out_cached": 0.025,
 632      "context_window": 1048576,
 633      "default_max_tokens": 32767,
 634      "can_reason": true,
 635      "reasoning_levels": [
 636        "low",
 637        "medium",
 638        "high"
 639      ],
 640      "default_reasoning_effort": "medium",
 641      "supports_attachments": true,
 642      "options": {}
 643    },
 644    {
 645      "id": "google/gemini-2.5-flash-lite-preview-06-17",
 646      "name": "Google: Gemini 2.5 Flash Lite Preview 06-17",
 647      "cost_per_1m_in": 0.1,
 648      "cost_per_1m_out": 0.4,
 649      "cost_per_1m_in_cached": 0.1833,
 650      "cost_per_1m_out_cached": 0.025,
 651      "context_window": 1048576,
 652      "default_max_tokens": 32767,
 653      "can_reason": true,
 654      "reasoning_levels": [
 655        "low",
 656        "medium",
 657        "high"
 658      ],
 659      "default_reasoning_effort": "medium",
 660      "supports_attachments": true,
 661      "options": {}
 662    },
 663    {
 664      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 665      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 666      "cost_per_1m_in": 0.1,
 667      "cost_per_1m_out": 0.4,
 668      "cost_per_1m_in_cached": 0,
 669      "cost_per_1m_out_cached": 0,
 670      "context_window": 1048576,
 671      "default_max_tokens": 32767,
 672      "can_reason": true,
 673      "reasoning_levels": [
 674        "low",
 675        "medium",
 676        "high"
 677      ],
 678      "default_reasoning_effort": "medium",
 679      "supports_attachments": true,
 680      "options": {}
 681    },
 682    {
 683      "id": "google/gemini-2.5-flash-preview-09-2025",
 684      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
 685      "cost_per_1m_in": 0.3,
 686      "cost_per_1m_out": 2.5,
 687      "cost_per_1m_in_cached": 0.3833,
 688      "cost_per_1m_out_cached": 0.075,
 689      "context_window": 1048576,
 690      "default_max_tokens": 32767,
 691      "can_reason": true,
 692      "reasoning_levels": [
 693        "low",
 694        "medium",
 695        "high"
 696      ],
 697      "default_reasoning_effort": "medium",
 698      "supports_attachments": true,
 699      "options": {}
 700    },
 701    {
 702      "id": "google/gemini-2.5-pro",
 703      "name": "Google: Gemini 2.5 Pro",
 704      "cost_per_1m_in": 1.25,
 705      "cost_per_1m_out": 10,
 706      "cost_per_1m_in_cached": 1.625,
 707      "cost_per_1m_out_cached": 0.125,
 708      "context_window": 1048576,
 709      "default_max_tokens": 32768,
 710      "can_reason": true,
 711      "reasoning_levels": [
 712        "low",
 713        "medium",
 714        "high"
 715      ],
 716      "default_reasoning_effort": "medium",
 717      "supports_attachments": true,
 718      "options": {}
 719    },
 720    {
 721      "id": "google/gemini-2.5-pro-preview-05-06",
 722      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 723      "cost_per_1m_in": 1.25,
 724      "cost_per_1m_out": 10,
 725      "cost_per_1m_in_cached": 1.625,
 726      "cost_per_1m_out_cached": 0.125,
 727      "context_window": 1048576,
 728      "default_max_tokens": 32768,
 729      "can_reason": true,
 730      "reasoning_levels": [
 731        "low",
 732        "medium",
 733        "high"
 734      ],
 735      "default_reasoning_effort": "medium",
 736      "supports_attachments": true,
 737      "options": {}
 738    },
 739    {
 740      "id": "google/gemini-2.5-pro-preview",
 741      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 742      "cost_per_1m_in": 1.25,
 743      "cost_per_1m_out": 10,
 744      "cost_per_1m_in_cached": 1.625,
 745      "cost_per_1m_out_cached": 0.125,
 746      "context_window": 1048576,
 747      "default_max_tokens": 32768,
 748      "can_reason": true,
 749      "reasoning_levels": [
 750        "low",
 751        "medium",
 752        "high"
 753      ],
 754      "default_reasoning_effort": "medium",
 755      "supports_attachments": true,
 756      "options": {}
 757    },
 758    {
 759      "id": "inception/mercury",
 760      "name": "Inception: Mercury",
 761      "cost_per_1m_in": 0.25,
 762      "cost_per_1m_out": 1,
 763      "cost_per_1m_in_cached": 0,
 764      "cost_per_1m_out_cached": 0,
 765      "context_window": 128000,
 766      "default_max_tokens": 8192,
 767      "can_reason": false,
 768      "supports_attachments": false,
 769      "options": {}
 770    },
 771    {
 772      "id": "inception/mercury-coder",
 773      "name": "Inception: Mercury Coder",
 774      "cost_per_1m_in": 0.25,
 775      "cost_per_1m_out": 1,
 776      "cost_per_1m_in_cached": 0,
 777      "cost_per_1m_out_cached": 0,
 778      "context_window": 128000,
 779      "default_max_tokens": 8192,
 780      "can_reason": false,
 781      "supports_attachments": false,
 782      "options": {}
 783    },
 784    {
 785      "id": "kwaipilot/kat-coder-pro:free",
 786      "name": "Kwaipilot: KAT-Coder-Pro V1 (free)",
 787      "cost_per_1m_in": 0,
 788      "cost_per_1m_out": 0,
 789      "cost_per_1m_in_cached": 0,
 790      "cost_per_1m_out_cached": 0,
 791      "context_window": 256000,
 792      "default_max_tokens": 16000,
 793      "can_reason": false,
 794      "supports_attachments": false,
 795      "options": {}
 796    },
 797    {
 798      "id": "meituan/longcat-flash-chat:free",
 799      "name": "Meituan: LongCat Flash Chat (free)",
 800      "cost_per_1m_in": 0,
 801      "cost_per_1m_out": 0,
 802      "cost_per_1m_in_cached": 0,
 803      "cost_per_1m_out_cached": 0,
 804      "context_window": 131072,
 805      "default_max_tokens": 65536,
 806      "can_reason": false,
 807      "supports_attachments": false,
 808      "options": {}
 809    },
 810    {
 811      "id": "meta-llama/llama-3.1-405b-instruct",
 812      "name": "Meta: Llama 3.1 405B Instruct",
 813      "cost_per_1m_in": 3.5,
 814      "cost_per_1m_out": 3.5,
 815      "cost_per_1m_in_cached": 0,
 816      "cost_per_1m_out_cached": 0,
 817      "context_window": 130815,
 818      "default_max_tokens": 13081,
 819      "can_reason": false,
 820      "supports_attachments": false,
 821      "options": {}
 822    },
 823    {
 824      "id": "meta-llama/llama-3.1-70b-instruct",
 825      "name": "Meta: Llama 3.1 70B Instruct",
 826      "cost_per_1m_in": 0.4,
 827      "cost_per_1m_out": 0.4,
 828      "cost_per_1m_in_cached": 0,
 829      "cost_per_1m_out_cached": 0,
 830      "context_window": 131072,
 831      "default_max_tokens": 8192,
 832      "can_reason": false,
 833      "supports_attachments": false,
 834      "options": {}
 835    },
 836    {
 837      "id": "meta-llama/llama-3.1-8b-instruct",
 838      "name": "Meta: Llama 3.1 8B Instruct",
 839      "cost_per_1m_in": 0.05,
 840      "cost_per_1m_out": 0.08,
 841      "cost_per_1m_in_cached": 0,
 842      "cost_per_1m_out_cached": 0,
 843      "context_window": 131072,
 844      "default_max_tokens": 65536,
 845      "can_reason": false,
 846      "supports_attachments": false,
 847      "options": {}
 848    },
 849    {
 850      "id": "meta-llama/llama-3.2-3b-instruct",
 851      "name": "Meta: Llama 3.2 3B Instruct",
 852      "cost_per_1m_in": 0.03,
 853      "cost_per_1m_out": 0.05,
 854      "cost_per_1m_in_cached": 0,
 855      "cost_per_1m_out_cached": 0,
 856      "context_window": 32768,
 857      "default_max_tokens": 16000,
 858      "can_reason": false,
 859      "supports_attachments": false,
 860      "options": {}
 861    },
 862    {
 863      "id": "meta-llama/llama-3.3-70b-instruct",
 864      "name": "Meta: Llama 3.3 70B Instruct",
 865      "cost_per_1m_in": 0.13,
 866      "cost_per_1m_out": 0.4,
 867      "cost_per_1m_in_cached": 0,
 868      "cost_per_1m_out_cached": 0,
 869      "context_window": 131072,
 870      "default_max_tokens": 13107,
 871      "can_reason": false,
 872      "supports_attachments": false,
 873      "options": {}
 874    },
 875    {
 876      "id": "meta-llama/llama-3.3-70b-instruct:free",
 877      "name": "Meta: Llama 3.3 70B Instruct (free)",
 878      "cost_per_1m_in": 0,
 879      "cost_per_1m_out": 0,
 880      "cost_per_1m_in_cached": 0,
 881      "cost_per_1m_out_cached": 0,
 882      "context_window": 131072,
 883      "default_max_tokens": 13107,
 884      "can_reason": false,
 885      "supports_attachments": false,
 886      "options": {}
 887    },
 888    {
 889      "id": "meta-llama/llama-4-maverick",
 890      "name": "Meta: Llama 4 Maverick",
 891      "cost_per_1m_in": 0.27,
 892      "cost_per_1m_out": 0.85,
 893      "cost_per_1m_in_cached": 0,
 894      "cost_per_1m_out_cached": 0,
 895      "context_window": 1048576,
 896      "default_max_tokens": 104857,
 897      "can_reason": false,
 898      "supports_attachments": true,
 899      "options": {}
 900    },
 901    {
 902      "id": "meta-llama/llama-4-scout",
 903      "name": "Meta: Llama 4 Scout",
 904      "cost_per_1m_in": 0.25,
 905      "cost_per_1m_out": 0.7,
 906      "cost_per_1m_in_cached": 0,
 907      "cost_per_1m_out_cached": 0,
 908      "context_window": 1310720,
 909      "default_max_tokens": 4096,
 910      "can_reason": false,
 911      "supports_attachments": true,
 912      "options": {}
 913    },
 914    {
 915      "id": "microsoft/phi-3-medium-128k-instruct",
 916      "name": "Microsoft: Phi-3 Medium 128K Instruct",
 917      "cost_per_1m_in": 1,
 918      "cost_per_1m_out": 1,
 919      "cost_per_1m_in_cached": 0,
 920      "cost_per_1m_out_cached": 0,
 921      "context_window": 128000,
 922      "default_max_tokens": 12800,
 923      "can_reason": false,
 924      "supports_attachments": false,
 925      "options": {}
 926    },
 927    {
 928      "id": "microsoft/phi-3-mini-128k-instruct",
 929      "name": "Microsoft: Phi-3 Mini 128K Instruct",
 930      "cost_per_1m_in": 0.1,
 931      "cost_per_1m_out": 0.1,
 932      "cost_per_1m_in_cached": 0,
 933      "cost_per_1m_out_cached": 0,
 934      "context_window": 128000,
 935      "default_max_tokens": 12800,
 936      "can_reason": false,
 937      "supports_attachments": false,
 938      "options": {}
 939    },
 940    {
 941      "id": "microsoft/phi-3.5-mini-128k-instruct",
 942      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
 943      "cost_per_1m_in": 0.1,
 944      "cost_per_1m_out": 0.1,
 945      "cost_per_1m_in_cached": 0,
 946      "cost_per_1m_out_cached": 0,
 947      "context_window": 128000,
 948      "default_max_tokens": 12800,
 949      "can_reason": false,
 950      "supports_attachments": false,
 951      "options": {}
 952    },
 953    {
 954      "id": "minimax/minimax-m2",
 955      "name": "MiniMax: MiniMax M2",
 956      "cost_per_1m_in": 0.3,
 957      "cost_per_1m_out": 1.2,
 958      "cost_per_1m_in_cached": 0,
 959      "cost_per_1m_out_cached": 0,
 960      "context_window": 204800,
 961      "default_max_tokens": 65536,
 962      "can_reason": true,
 963      "reasoning_levels": [
 964        "low",
 965        "medium",
 966        "high"
 967      ],
 968      "default_reasoning_effort": "medium",
 969      "supports_attachments": false,
 970      "options": {}
 971    },
 972    {
 973      "id": "mistralai/mistral-large",
 974      "name": "Mistral Large",
 975      "cost_per_1m_in": 2,
 976      "cost_per_1m_out": 6,
 977      "cost_per_1m_in_cached": 0,
 978      "cost_per_1m_out_cached": 0,
 979      "context_window": 128000,
 980      "default_max_tokens": 12800,
 981      "can_reason": false,
 982      "supports_attachments": false,
 983      "options": {}
 984    },
 985    {
 986      "id": "mistralai/mistral-large-2407",
 987      "name": "Mistral Large 2407",
 988      "cost_per_1m_in": 2,
 989      "cost_per_1m_out": 6,
 990      "cost_per_1m_in_cached": 0,
 991      "cost_per_1m_out_cached": 0,
 992      "context_window": 131072,
 993      "default_max_tokens": 13107,
 994      "can_reason": false,
 995      "supports_attachments": false,
 996      "options": {}
 997    },
 998    {
 999      "id": "mistralai/mistral-large-2411",
1000      "name": "Mistral Large 2411",
1001      "cost_per_1m_in": 2,
1002      "cost_per_1m_out": 6,
1003      "cost_per_1m_in_cached": 0,
1004      "cost_per_1m_out_cached": 0,
1005      "context_window": 131072,
1006      "default_max_tokens": 13107,
1007      "can_reason": false,
1008      "supports_attachments": false,
1009      "options": {}
1010    },
1011    {
1012      "id": "mistralai/mistral-small",
1013      "name": "Mistral Small",
1014      "cost_per_1m_in": 0.2,
1015      "cost_per_1m_out": 0.6,
1016      "cost_per_1m_in_cached": 0,
1017      "cost_per_1m_out_cached": 0,
1018      "context_window": 32768,
1019      "default_max_tokens": 3276,
1020      "can_reason": false,
1021      "supports_attachments": false,
1022      "options": {}
1023    },
1024    {
1025      "id": "mistralai/mistral-tiny",
1026      "name": "Mistral Tiny",
1027      "cost_per_1m_in": 0.25,
1028      "cost_per_1m_out": 0.25,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0,
1031      "context_window": 32768,
1032      "default_max_tokens": 3276,
1033      "can_reason": false,
1034      "supports_attachments": false,
1035      "options": {}
1036    },
1037    {
1038      "id": "mistralai/codestral-2501",
1039      "name": "Mistral: Codestral 2501",
1040      "cost_per_1m_in": 0.3,
1041      "cost_per_1m_out": 0.9,
1042      "cost_per_1m_in_cached": 0,
1043      "cost_per_1m_out_cached": 0,
1044      "context_window": 256000,
1045      "default_max_tokens": 25600,
1046      "can_reason": false,
1047      "supports_attachments": false,
1048      "options": {}
1049    },
1050    {
1051      "id": "mistralai/codestral-2508",
1052      "name": "Mistral: Codestral 2508",
1053      "cost_per_1m_in": 0.3,
1054      "cost_per_1m_out": 0.9,
1055      "cost_per_1m_in_cached": 0,
1056      "cost_per_1m_out_cached": 0,
1057      "context_window": 256000,
1058      "default_max_tokens": 25600,
1059      "can_reason": false,
1060      "supports_attachments": false,
1061      "options": {}
1062    },
1063    {
1064      "id": "mistralai/devstral-medium",
1065      "name": "Mistral: Devstral Medium",
1066      "cost_per_1m_in": 0.4,
1067      "cost_per_1m_out": 2,
1068      "cost_per_1m_in_cached": 0,
1069      "cost_per_1m_out_cached": 0,
1070      "context_window": 131072,
1071      "default_max_tokens": 13107,
1072      "can_reason": false,
1073      "supports_attachments": false,
1074      "options": {}
1075    },
1076    {
1077      "id": "mistralai/devstral-small",
1078      "name": "Mistral: Devstral Small 1.1",
1079      "cost_per_1m_in": 0.1,
1080      "cost_per_1m_out": 0.3,
1081      "cost_per_1m_in_cached": 0,
1082      "cost_per_1m_out_cached": 0,
1083      "context_window": 131072,
1084      "default_max_tokens": 13107,
1085      "can_reason": false,
1086      "supports_attachments": false,
1087      "options": {}
1088    },
1089    {
1090      "id": "mistralai/magistral-medium-2506",
1091      "name": "Mistral: Magistral Medium 2506",
1092      "cost_per_1m_in": 2,
1093      "cost_per_1m_out": 5,
1094      "cost_per_1m_in_cached": 0,
1095      "cost_per_1m_out_cached": 0,
1096      "context_window": 40960,
1097      "default_max_tokens": 20000,
1098      "can_reason": true,
1099      "reasoning_levels": [
1100        "low",
1101        "medium",
1102        "high"
1103      ],
1104      "default_reasoning_effort": "medium",
1105      "supports_attachments": false,
1106      "options": {}
1107    },
1108    {
1109      "id": "mistralai/magistral-medium-2506:thinking",
1110      "name": "Mistral: Magistral Medium 2506 (thinking)",
1111      "cost_per_1m_in": 2,
1112      "cost_per_1m_out": 5,
1113      "cost_per_1m_in_cached": 0,
1114      "cost_per_1m_out_cached": 0,
1115      "context_window": 40960,
1116      "default_max_tokens": 20000,
1117      "can_reason": true,
1118      "reasoning_levels": [
1119        "low",
1120        "medium",
1121        "high"
1122      ],
1123      "default_reasoning_effort": "medium",
1124      "supports_attachments": false,
1125      "options": {}
1126    },
1127    {
1128      "id": "mistralai/magistral-small-2506",
1129      "name": "Mistral: Magistral Small 2506",
1130      "cost_per_1m_in": 0.5,
1131      "cost_per_1m_out": 1.5,
1132      "cost_per_1m_in_cached": 0,
1133      "cost_per_1m_out_cached": 0,
1134      "context_window": 40000,
1135      "default_max_tokens": 20000,
1136      "can_reason": true,
1137      "reasoning_levels": [
1138        "low",
1139        "medium",
1140        "high"
1141      ],
1142      "default_reasoning_effort": "medium",
1143      "supports_attachments": false,
1144      "options": {}
1145    },
1146    {
1147      "id": "mistralai/ministral-3b",
1148      "name": "Mistral: Ministral 3B",
1149      "cost_per_1m_in": 0.04,
1150      "cost_per_1m_out": 0.04,
1151      "cost_per_1m_in_cached": 0,
1152      "cost_per_1m_out_cached": 0,
1153      "context_window": 131072,
1154      "default_max_tokens": 13107,
1155      "can_reason": false,
1156      "supports_attachments": false,
1157      "options": {}
1158    },
1159    {
1160      "id": "mistralai/ministral-8b",
1161      "name": "Mistral: Ministral 8B",
1162      "cost_per_1m_in": 0.1,
1163      "cost_per_1m_out": 0.1,
1164      "cost_per_1m_in_cached": 0,
1165      "cost_per_1m_out_cached": 0,
1166      "context_window": 131072,
1167      "default_max_tokens": 13107,
1168      "can_reason": false,
1169      "supports_attachments": false,
1170      "options": {}
1171    },
1172    {
1173      "id": "mistralai/mistral-7b-instruct",
1174      "name": "Mistral: Mistral 7B Instruct",
1175      "cost_per_1m_in": 0.028,
1176      "cost_per_1m_out": 0.054,
1177      "cost_per_1m_in_cached": 0,
1178      "cost_per_1m_out_cached": 0,
1179      "context_window": 32768,
1180      "default_max_tokens": 8192,
1181      "can_reason": false,
1182      "supports_attachments": false,
1183      "options": {}
1184    },
1185    {
1186      "id": "mistralai/mistral-7b-instruct:free",
1187      "name": "Mistral: Mistral 7B Instruct (free)",
1188      "cost_per_1m_in": 0,
1189      "cost_per_1m_out": 0,
1190      "cost_per_1m_in_cached": 0,
1191      "cost_per_1m_out_cached": 0,
1192      "context_window": 32768,
1193      "default_max_tokens": 8192,
1194      "can_reason": false,
1195      "supports_attachments": false,
1196      "options": {}
1197    },
1198    {
1199      "id": "mistralai/mistral-medium-3",
1200      "name": "Mistral: Mistral Medium 3",
1201      "cost_per_1m_in": 0.4,
1202      "cost_per_1m_out": 2,
1203      "cost_per_1m_in_cached": 0,
1204      "cost_per_1m_out_cached": 0,
1205      "context_window": 131072,
1206      "default_max_tokens": 13107,
1207      "can_reason": false,
1208      "supports_attachments": true,
1209      "options": {}
1210    },
1211    {
1212      "id": "mistralai/mistral-medium-3.1",
1213      "name": "Mistral: Mistral Medium 3.1",
1214      "cost_per_1m_in": 0.4,
1215      "cost_per_1m_out": 2,
1216      "cost_per_1m_in_cached": 0,
1217      "cost_per_1m_out_cached": 0,
1218      "context_window": 131072,
1219      "default_max_tokens": 13107,
1220      "can_reason": false,
1221      "supports_attachments": true,
1222      "options": {}
1223    },
1224    {
1225      "id": "mistralai/mistral-nemo",
1226      "name": "Mistral: Mistral Nemo",
1227      "cost_per_1m_in": 0.15,
1228      "cost_per_1m_out": 0.15,
1229      "cost_per_1m_in_cached": 0,
1230      "cost_per_1m_out_cached": 0,
1231      "context_window": 131072,
1232      "default_max_tokens": 13107,
1233      "can_reason": false,
1234      "supports_attachments": false,
1235      "options": {}
1236    },
1237    {
1238      "id": "mistralai/mistral-small-24b-instruct-2501",
1239      "name": "Mistral: Mistral Small 3",
1240      "cost_per_1m_in": 0.1,
1241      "cost_per_1m_out": 0.3,
1242      "cost_per_1m_in_cached": 0,
1243      "cost_per_1m_out_cached": 0,
1244      "context_window": 32768,
1245      "default_max_tokens": 3276,
1246      "can_reason": false,
1247      "supports_attachments": false,
1248      "options": {}
1249    },
1250    {
1251      "id": "mistralai/mistral-small-3.1-24b-instruct",
1252      "name": "Mistral: Mistral Small 3.1 24B",
1253      "cost_per_1m_in": 0.1,
1254      "cost_per_1m_out": 0.3,
1255      "cost_per_1m_in_cached": 0,
1256      "cost_per_1m_out_cached": 0,
1257      "context_window": 131072,
1258      "default_max_tokens": 13107,
1259      "can_reason": false,
1260      "supports_attachments": true,
1261      "options": {}
1262    },
1263    {
1264      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
1265      "name": "Mistral: Mistral Small 3.1 24B (free)",
1266      "cost_per_1m_in": 0,
1267      "cost_per_1m_out": 0,
1268      "cost_per_1m_in_cached": 0,
1269      "cost_per_1m_out_cached": 0,
1270      "context_window": 96000,
1271      "default_max_tokens": 48000,
1272      "can_reason": false,
1273      "supports_attachments": true,
1274      "options": {}
1275    },
1276    {
1277      "id": "mistralai/mistral-small-3.2-24b-instruct",
1278      "name": "Mistral: Mistral Small 3.2 24B",
1279      "cost_per_1m_in": 0.1,
1280      "cost_per_1m_out": 0.3,
1281      "cost_per_1m_in_cached": 0,
1282      "cost_per_1m_out_cached": 0,
1283      "context_window": 131072,
1284      "default_max_tokens": 13107,
1285      "can_reason": false,
1286      "supports_attachments": true,
1287      "options": {}
1288    },
1289    {
1290      "id": "mistralai/mistral-small-3.2-24b-instruct:free",
1291      "name": "Mistral: Mistral Small 3.2 24B (free)",
1292      "cost_per_1m_in": 0,
1293      "cost_per_1m_out": 0,
1294      "cost_per_1m_in_cached": 0,
1295      "cost_per_1m_out_cached": 0,
1296      "context_window": 131072,
1297      "default_max_tokens": 13107,
1298      "can_reason": false,
1299      "supports_attachments": true,
1300      "options": {}
1301    },
1302    {
1303      "id": "mistralai/mixtral-8x22b-instruct",
1304      "name": "Mistral: Mixtral 8x22B Instruct",
1305      "cost_per_1m_in": 2,
1306      "cost_per_1m_out": 6,
1307      "cost_per_1m_in_cached": 0,
1308      "cost_per_1m_out_cached": 0,
1309      "context_window": 65536,
1310      "default_max_tokens": 6553,
1311      "can_reason": false,
1312      "supports_attachments": false,
1313      "options": {}
1314    },
1315    {
1316      "id": "mistralai/mixtral-8x7b-instruct",
1317      "name": "Mistral: Mixtral 8x7B Instruct",
1318      "cost_per_1m_in": 0.54,
1319      "cost_per_1m_out": 0.54,
1320      "cost_per_1m_in_cached": 0,
1321      "cost_per_1m_out_cached": 0,
1322      "context_window": 32768,
1323      "default_max_tokens": 8192,
1324      "can_reason": false,
1325      "supports_attachments": false,
1326      "options": {}
1327    },
1328    {
1329      "id": "mistralai/pixtral-large-2411",
1330      "name": "Mistral: Pixtral Large 2411",
1331      "cost_per_1m_in": 2,
1332      "cost_per_1m_out": 6,
1333      "cost_per_1m_in_cached": 0,
1334      "cost_per_1m_out_cached": 0,
1335      "context_window": 131072,
1336      "default_max_tokens": 13107,
1337      "can_reason": false,
1338      "supports_attachments": true,
1339      "options": {}
1340    },
1341    {
1342      "id": "mistralai/mistral-saba",
1343      "name": "Mistral: Saba",
1344      "cost_per_1m_in": 0.2,
1345      "cost_per_1m_out": 0.6,
1346      "cost_per_1m_in_cached": 0,
1347      "cost_per_1m_out_cached": 0,
1348      "context_window": 32768,
1349      "default_max_tokens": 3276,
1350      "can_reason": false,
1351      "supports_attachments": false,
1352      "options": {}
1353    },
1354    {
1355      "id": "mistralai/voxtral-small-24b-2507",
1356      "name": "Mistral: Voxtral Small 24B 2507",
1357      "cost_per_1m_in": 0.1,
1358      "cost_per_1m_out": 0.3,
1359      "cost_per_1m_in_cached": 0,
1360      "cost_per_1m_out_cached": 0,
1361      "context_window": 32000,
1362      "default_max_tokens": 3200,
1363      "can_reason": false,
1364      "supports_attachments": false,
1365      "options": {}
1366    },
1367    {
1368      "id": "moonshotai/kimi-k2",
1369      "name": "MoonshotAI: Kimi K2 0711",
1370      "cost_per_1m_in": 0.57,
1371      "cost_per_1m_out": 2.3,
1372      "cost_per_1m_in_cached": 0,
1373      "cost_per_1m_out_cached": 0,
1374      "context_window": 131072,
1375      "default_max_tokens": 65536,
1376      "can_reason": false,
1377      "supports_attachments": false,
1378      "options": {}
1379    },
1380    {
1381      "id": "moonshotai/kimi-k2-0905",
1382      "name": "MoonshotAI: Kimi K2 0905",
1383      "cost_per_1m_in": 0.5,
1384      "cost_per_1m_out": 2,
1385      "cost_per_1m_in_cached": 0,
1386      "cost_per_1m_out_cached": 0,
1387      "context_window": 262144,
1388      "default_max_tokens": 26214,
1389      "can_reason": false,
1390      "supports_attachments": false,
1391      "options": {}
1392    },
1393    {
1394      "id": "moonshotai/kimi-k2-0905:exacto",
1395      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
1396      "cost_per_1m_in": 0.6,
1397      "cost_per_1m_out": 2.5,
1398      "cost_per_1m_in_cached": 0,
1399      "cost_per_1m_out_cached": 0,
1400      "context_window": 262144,
1401      "default_max_tokens": 26214,
1402      "can_reason": false,
1403      "supports_attachments": false,
1404      "options": {}
1405    },
1406    {
1407      "id": "moonshotai/kimi-k2-thinking",
1408      "name": "MoonshotAI: Kimi K2 Thinking",
1409      "cost_per_1m_in": 0.6,
1410      "cost_per_1m_out": 2.5,
1411      "cost_per_1m_in_cached": 0,
1412      "cost_per_1m_out_cached": 0,
1413      "context_window": 262144,
1414      "default_max_tokens": 26214,
1415      "can_reason": true,
1416      "reasoning_levels": [
1417        "low",
1418        "medium",
1419        "high"
1420      ],
1421      "default_reasoning_effort": "medium",
1422      "supports_attachments": false,
1423      "options": {}
1424    },
1425    {
1426      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1427      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1428      "cost_per_1m_in": 1.2,
1429      "cost_per_1m_out": 1.2,
1430      "cost_per_1m_in_cached": 0,
1431      "cost_per_1m_out_cached": 0,
1432      "context_window": 131072,
1433      "default_max_tokens": 8192,
1434      "can_reason": false,
1435      "supports_attachments": false,
1436      "options": {}
1437    },
1438    {
1439      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1440      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1441      "cost_per_1m_in": 0.1,
1442      "cost_per_1m_out": 0.4,
1443      "cost_per_1m_in_cached": 0,
1444      "cost_per_1m_out_cached": 0,
1445      "context_window": 131072,
1446      "default_max_tokens": 13107,
1447      "can_reason": true,
1448      "reasoning_levels": [
1449        "low",
1450        "medium",
1451        "high"
1452      ],
1453      "default_reasoning_effort": "medium",
1454      "supports_attachments": false,
1455      "options": {}
1456    },
1457    {
1458      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1459      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1460      "cost_per_1m_in": 0,
1461      "cost_per_1m_out": 0,
1462      "cost_per_1m_in_cached": 0,
1463      "cost_per_1m_out_cached": 0,
1464      "context_window": 128000,
1465      "default_max_tokens": 64000,
1466      "can_reason": true,
1467      "reasoning_levels": [
1468        "low",
1469        "medium",
1470        "high"
1471      ],
1472      "default_reasoning_effort": "medium",
1473      "supports_attachments": true,
1474      "options": {}
1475    },
1476    {
1477      "id": "nvidia/nemotron-nano-9b-v2",
1478      "name": "NVIDIA: Nemotron Nano 9B V2",
1479      "cost_per_1m_in": 0.04,
1480      "cost_per_1m_out": 0.16,
1481      "cost_per_1m_in_cached": 0,
1482      "cost_per_1m_out_cached": 0,
1483      "context_window": 131072,
1484      "default_max_tokens": 13107,
1485      "can_reason": true,
1486      "reasoning_levels": [
1487        "low",
1488        "medium",
1489        "high"
1490      ],
1491      "default_reasoning_effort": "medium",
1492      "supports_attachments": false,
1493      "options": {}
1494    },
1495    {
1496      "id": "nvidia/nemotron-nano-9b-v2:free",
1497      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1498      "cost_per_1m_in": 0,
1499      "cost_per_1m_out": 0,
1500      "cost_per_1m_in_cached": 0,
1501      "cost_per_1m_out_cached": 0,
1502      "context_window": 128000,
1503      "default_max_tokens": 12800,
1504      "can_reason": true,
1505      "reasoning_levels": [
1506        "low",
1507        "medium",
1508        "high"
1509      ],
1510      "default_reasoning_effort": "medium",
1511      "supports_attachments": false,
1512      "options": {}
1513    },
1514    {
1515      "id": "nousresearch/deephermes-3-mistral-24b-preview",
1516      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
1517      "cost_per_1m_in": 0.15,
1518      "cost_per_1m_out": 0.59,
1519      "cost_per_1m_in_cached": 0,
1520      "cost_per_1m_out_cached": 0,
1521      "context_window": 32768,
1522      "default_max_tokens": 16384,
1523      "can_reason": true,
1524      "reasoning_levels": [
1525        "low",
1526        "medium",
1527        "high"
1528      ],
1529      "default_reasoning_effort": "medium",
1530      "supports_attachments": false,
1531      "options": {}
1532    },
1533    {
1534      "id": "nousresearch/hermes-4-405b",
1535      "name": "Nous: Hermes 4 405B",
1536      "cost_per_1m_in": 0.3,
1537      "cost_per_1m_out": 1.2,
1538      "cost_per_1m_in_cached": 0,
1539      "cost_per_1m_out_cached": 0,
1540      "context_window": 131072,
1541      "default_max_tokens": 65536,
1542      "can_reason": true,
1543      "reasoning_levels": [
1544        "low",
1545        "medium",
1546        "high"
1547      ],
1548      "default_reasoning_effort": "medium",
1549      "supports_attachments": false,
1550      "options": {}
1551    },
1552    {
1553      "id": "openai/codex-mini",
1554      "name": "OpenAI: Codex Mini",
1555      "cost_per_1m_in": 1.5,
1556      "cost_per_1m_out": 6,
1557      "cost_per_1m_in_cached": 0,
1558      "cost_per_1m_out_cached": 0.375,
1559      "context_window": 200000,
1560      "default_max_tokens": 50000,
1561      "can_reason": true,
1562      "reasoning_levels": [
1563        "low",
1564        "medium",
1565        "high"
1566      ],
1567      "default_reasoning_effort": "medium",
1568      "supports_attachments": true,
1569      "options": {}
1570    },
1571    {
1572      "id": "openai/gpt-4-turbo",
1573      "name": "OpenAI: GPT-4 Turbo",
1574      "cost_per_1m_in": 10,
1575      "cost_per_1m_out": 30,
1576      "cost_per_1m_in_cached": 0,
1577      "cost_per_1m_out_cached": 0,
1578      "context_window": 128000,
1579      "default_max_tokens": 2048,
1580      "can_reason": false,
1581      "supports_attachments": true,
1582      "options": {}
1583    },
1584    {
1585      "id": "openai/gpt-4-1106-preview",
1586      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1587      "cost_per_1m_in": 10,
1588      "cost_per_1m_out": 30,
1589      "cost_per_1m_in_cached": 0,
1590      "cost_per_1m_out_cached": 0,
1591      "context_window": 128000,
1592      "default_max_tokens": 2048,
1593      "can_reason": false,
1594      "supports_attachments": false,
1595      "options": {}
1596    },
1597    {
1598      "id": "openai/gpt-4-turbo-preview",
1599      "name": "OpenAI: GPT-4 Turbo Preview",
1600      "cost_per_1m_in": 10,
1601      "cost_per_1m_out": 30,
1602      "cost_per_1m_in_cached": 0,
1603      "cost_per_1m_out_cached": 0,
1604      "context_window": 128000,
1605      "default_max_tokens": 2048,
1606      "can_reason": false,
1607      "supports_attachments": false,
1608      "options": {}
1609    },
1610    {
1611      "id": "openai/gpt-4.1",
1612      "name": "OpenAI: GPT-4.1",
1613      "cost_per_1m_in": 2,
1614      "cost_per_1m_out": 8,
1615      "cost_per_1m_in_cached": 0,
1616      "cost_per_1m_out_cached": 0.5,
1617      "context_window": 1047576,
1618      "default_max_tokens": 104757,
1619      "can_reason": false,
1620      "supports_attachments": true,
1621      "options": {}
1622    },
1623    {
1624      "id": "openai/gpt-4.1-mini",
1625      "name": "OpenAI: GPT-4.1 Mini",
1626      "cost_per_1m_in": 0.4,
1627      "cost_per_1m_out": 1.6,
1628      "cost_per_1m_in_cached": 0,
1629      "cost_per_1m_out_cached": 0.1,
1630      "context_window": 1047576,
1631      "default_max_tokens": 104757,
1632      "can_reason": false,
1633      "supports_attachments": true,
1634      "options": {}
1635    },
1636    {
1637      "id": "openai/gpt-4.1-nano",
1638      "name": "OpenAI: GPT-4.1 Nano",
1639      "cost_per_1m_in": 0.1,
1640      "cost_per_1m_out": 0.4,
1641      "cost_per_1m_in_cached": 0,
1642      "cost_per_1m_out_cached": 0.03,
1643      "context_window": 1047576,
1644      "default_max_tokens": 104757,
1645      "can_reason": false,
1646      "supports_attachments": true,
1647      "options": {}
1648    },
1649    {
1650      "id": "openai/gpt-4o",
1651      "name": "OpenAI: GPT-4o",
1652      "cost_per_1m_in": 2.5,
1653      "cost_per_1m_out": 10,
1654      "cost_per_1m_in_cached": 0,
1655      "cost_per_1m_out_cached": 0,
1656      "context_window": 128000,
1657      "default_max_tokens": 8192,
1658      "can_reason": false,
1659      "supports_attachments": true,
1660      "options": {}
1661    },
1662    {
1663      "id": "openai/gpt-4o-2024-05-13",
1664      "name": "OpenAI: GPT-4o (2024-05-13)",
1665      "cost_per_1m_in": 5,
1666      "cost_per_1m_out": 15,
1667      "cost_per_1m_in_cached": 0,
1668      "cost_per_1m_out_cached": 0,
1669      "context_window": 128000,
1670      "default_max_tokens": 2048,
1671      "can_reason": false,
1672      "supports_attachments": true,
1673      "options": {}
1674    },
1675    {
1676      "id": "openai/gpt-4o-2024-08-06",
1677      "name": "OpenAI: GPT-4o (2024-08-06)",
1678      "cost_per_1m_in": 2.5,
1679      "cost_per_1m_out": 10,
1680      "cost_per_1m_in_cached": 0,
1681      "cost_per_1m_out_cached": 1.25,
1682      "context_window": 128000,
1683      "default_max_tokens": 8192,
1684      "can_reason": false,
1685      "supports_attachments": true,
1686      "options": {}
1687    },
1688    {
1689      "id": "openai/gpt-4o-2024-11-20",
1690      "name": "OpenAI: GPT-4o (2024-11-20)",
1691      "cost_per_1m_in": 2.5,
1692      "cost_per_1m_out": 10,
1693      "cost_per_1m_in_cached": 0,
1694      "cost_per_1m_out_cached": 1.25,
1695      "context_window": 128000,
1696      "default_max_tokens": 8192,
1697      "can_reason": false,
1698      "supports_attachments": true,
1699      "options": {}
1700    },
1701    {
1702      "id": "openai/gpt-4o:extended",
1703      "name": "OpenAI: GPT-4o (extended)",
1704      "cost_per_1m_in": 6,
1705      "cost_per_1m_out": 18,
1706      "cost_per_1m_in_cached": 0,
1707      "cost_per_1m_out_cached": 0,
1708      "context_window": 128000,
1709      "default_max_tokens": 32000,
1710      "can_reason": false,
1711      "supports_attachments": true,
1712      "options": {}
1713    },
1714    {
1715      "id": "openai/gpt-4o-audio-preview",
1716      "name": "OpenAI: GPT-4o Audio",
1717      "cost_per_1m_in": 2.5,
1718      "cost_per_1m_out": 10,
1719      "cost_per_1m_in_cached": 0,
1720      "cost_per_1m_out_cached": 0,
1721      "context_window": 128000,
1722      "default_max_tokens": 8192,
1723      "can_reason": false,
1724      "supports_attachments": false,
1725      "options": {}
1726    },
1727    {
1728      "id": "openai/gpt-4o-mini",
1729      "name": "OpenAI: GPT-4o-mini",
1730      "cost_per_1m_in": 0.15,
1731      "cost_per_1m_out": 0.6,
1732      "cost_per_1m_in_cached": 0,
1733      "cost_per_1m_out_cached": 0.075,
1734      "context_window": 128000,
1735      "default_max_tokens": 8192,
1736      "can_reason": false,
1737      "supports_attachments": true,
1738      "options": {}
1739    },
1740    {
1741      "id": "openai/gpt-4o-mini-2024-07-18",
1742      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1743      "cost_per_1m_in": 0.15,
1744      "cost_per_1m_out": 0.6,
1745      "cost_per_1m_in_cached": 0,
1746      "cost_per_1m_out_cached": 0.075,
1747      "context_window": 128000,
1748      "default_max_tokens": 8192,
1749      "can_reason": false,
1750      "supports_attachments": true,
1751      "options": {}
1752    },
1753    {
1754      "id": "openai/gpt-5",
1755      "name": "OpenAI: GPT-5",
1756      "cost_per_1m_in": 1.25,
1757      "cost_per_1m_out": 10,
1758      "cost_per_1m_in_cached": 0,
1759      "cost_per_1m_out_cached": 0.125,
1760      "context_window": 400000,
1761      "default_max_tokens": 64000,
1762      "can_reason": true,
1763      "reasoning_levels": [
1764        "low",
1765        "medium",
1766        "high"
1767      ],
1768      "default_reasoning_effort": "medium",
1769      "supports_attachments": true,
1770      "options": {}
1771    },
1772    {
1773      "id": "openai/gpt-5-codex",
1774      "name": "OpenAI: GPT-5 Codex",
1775      "cost_per_1m_in": 1.25,
1776      "cost_per_1m_out": 10,
1777      "cost_per_1m_in_cached": 0,
1778      "cost_per_1m_out_cached": 0.125,
1779      "context_window": 400000,
1780      "default_max_tokens": 64000,
1781      "can_reason": true,
1782      "reasoning_levels": [
1783        "low",
1784        "medium",
1785        "high"
1786      ],
1787      "default_reasoning_effort": "medium",
1788      "supports_attachments": true,
1789      "options": {}
1790    },
1791    {
1792      "id": "openai/gpt-5-image",
1793      "name": "OpenAI: GPT-5 Image",
1794      "cost_per_1m_in": 10,
1795      "cost_per_1m_out": 10,
1796      "cost_per_1m_in_cached": 0,
1797      "cost_per_1m_out_cached": 1.25,
1798      "context_window": 400000,
1799      "default_max_tokens": 64000,
1800      "can_reason": true,
1801      "reasoning_levels": [
1802        "low",
1803        "medium",
1804        "high"
1805      ],
1806      "default_reasoning_effort": "medium",
1807      "supports_attachments": true,
1808      "options": {}
1809    },
1810    {
1811      "id": "openai/gpt-5-image-mini",
1812      "name": "OpenAI: GPT-5 Image Mini",
1813      "cost_per_1m_in": 2.5,
1814      "cost_per_1m_out": 2,
1815      "cost_per_1m_in_cached": 0,
1816      "cost_per_1m_out_cached": 0.25,
1817      "context_window": 400000,
1818      "default_max_tokens": 64000,
1819      "can_reason": true,
1820      "reasoning_levels": [
1821        "low",
1822        "medium",
1823        "high"
1824      ],
1825      "default_reasoning_effort": "medium",
1826      "supports_attachments": true,
1827      "options": {}
1828    },
1829    {
1830      "id": "openai/gpt-5-mini",
1831      "name": "OpenAI: GPT-5 Mini",
1832      "cost_per_1m_in": 0.25,
1833      "cost_per_1m_out": 2,
1834      "cost_per_1m_in_cached": 0,
1835      "cost_per_1m_out_cached": 0.03,
1836      "context_window": 400000,
1837      "default_max_tokens": 40000,
1838      "can_reason": true,
1839      "reasoning_levels": [
1840        "low",
1841        "medium",
1842        "high"
1843      ],
1844      "default_reasoning_effort": "medium",
1845      "supports_attachments": true,
1846      "options": {}
1847    },
1848    {
1849      "id": "openai/gpt-5-nano",
1850      "name": "OpenAI: GPT-5 Nano",
1851      "cost_per_1m_in": 0.05,
1852      "cost_per_1m_out": 0.4,
1853      "cost_per_1m_in_cached": 0,
1854      "cost_per_1m_out_cached": 0.01,
1855      "context_window": 400000,
1856      "default_max_tokens": 40000,
1857      "can_reason": true,
1858      "reasoning_levels": [
1859        "low",
1860        "medium",
1861        "high"
1862      ],
1863      "default_reasoning_effort": "medium",
1864      "supports_attachments": true,
1865      "options": {}
1866    },
1867    {
1868      "id": "openai/gpt-5-pro",
1869      "name": "OpenAI: GPT-5 Pro",
1870      "cost_per_1m_in": 15,
1871      "cost_per_1m_out": 120,
1872      "cost_per_1m_in_cached": 0,
1873      "cost_per_1m_out_cached": 0,
1874      "context_window": 400000,
1875      "default_max_tokens": 64000,
1876      "can_reason": true,
1877      "reasoning_levels": [
1878        "low",
1879        "medium",
1880        "high"
1881      ],
1882      "default_reasoning_effort": "medium",
1883      "supports_attachments": true,
1884      "options": {}
1885    },
1886    {
1887      "id": "openai/gpt-5.1",
1888      "name": "OpenAI: GPT-5.1",
1889      "cost_per_1m_in": 1.25,
1890      "cost_per_1m_out": 10,
1891      "cost_per_1m_in_cached": 0,
1892      "cost_per_1m_out_cached": 0.125,
1893      "context_window": 400000,
1894      "default_max_tokens": 64000,
1895      "can_reason": true,
1896      "reasoning_levels": [
1897        "low",
1898        "medium",
1899        "high"
1900      ],
1901      "default_reasoning_effort": "medium",
1902      "supports_attachments": true,
1903      "options": {}
1904    },
1905    {
1906      "id": "openai/gpt-5.1-chat",
1907      "name": "OpenAI: GPT-5.1 Chat",
1908      "cost_per_1m_in": 1.25,
1909      "cost_per_1m_out": 10,
1910      "cost_per_1m_in_cached": 0,
1911      "cost_per_1m_out_cached": 0.125,
1912      "context_window": 128000,
1913      "default_max_tokens": 8192,
1914      "can_reason": false,
1915      "supports_attachments": true,
1916      "options": {}
1917    },
1918    {
1919      "id": "openai/gpt-5.1-codex",
1920      "name": "OpenAI: GPT-5.1-Codex",
1921      "cost_per_1m_in": 1.25,
1922      "cost_per_1m_out": 10,
1923      "cost_per_1m_in_cached": 0,
1924      "cost_per_1m_out_cached": 0.125,
1925      "context_window": 400000,
1926      "default_max_tokens": 64000,
1927      "can_reason": true,
1928      "reasoning_levels": [
1929        "low",
1930        "medium",
1931        "high"
1932      ],
1933      "default_reasoning_effort": "medium",
1934      "supports_attachments": true,
1935      "options": {}
1936    },
1937    {
1938      "id": "openai/gpt-5.1-codex-mini",
1939      "name": "OpenAI: GPT-5.1-Codex-Mini",
1940      "cost_per_1m_in": 0.25,
1941      "cost_per_1m_out": 2,
1942      "cost_per_1m_in_cached": 0,
1943      "cost_per_1m_out_cached": 0.025,
1944      "context_window": 400000,
1945      "default_max_tokens": 50000,
1946      "can_reason": true,
1947      "reasoning_levels": [
1948        "low",
1949        "medium",
1950        "high"
1951      ],
1952      "default_reasoning_effort": "medium",
1953      "supports_attachments": true,
1954      "options": {}
1955    },
1956    {
1957      "id": "openai/gpt-oss-120b",
1958      "name": "OpenAI: gpt-oss-120b",
1959      "cost_per_1m_in": 0.14,
1960      "cost_per_1m_out": 0.95,
1961      "cost_per_1m_in_cached": 0,
1962      "cost_per_1m_out_cached": 0,
1963      "context_window": 131072,
1964      "default_max_tokens": 65536,
1965      "can_reason": true,
1966      "reasoning_levels": [
1967        "low",
1968        "medium",
1969        "high"
1970      ],
1971      "default_reasoning_effort": "medium",
1972      "supports_attachments": false,
1973      "options": {}
1974    },
1975    {
1976      "id": "openai/gpt-oss-120b:exacto",
1977      "name": "OpenAI: gpt-oss-120b (exacto)",
1978      "cost_per_1m_in": 0.05,
1979      "cost_per_1m_out": 0.25,
1980      "cost_per_1m_in_cached": 0,
1981      "cost_per_1m_out_cached": 0,
1982      "context_window": 131072,
1983      "default_max_tokens": 16384,
1984      "can_reason": true,
1985      "reasoning_levels": [
1986        "low",
1987        "medium",
1988        "high"
1989      ],
1990      "default_reasoning_effort": "medium",
1991      "supports_attachments": false,
1992      "options": {}
1993    },
1994    {
1995      "id": "openai/gpt-oss-20b",
1996      "name": "OpenAI: gpt-oss-20b",
1997      "cost_per_1m_in": 0.04,
1998      "cost_per_1m_out": 0.15,
1999      "cost_per_1m_in_cached": 0,
2000      "cost_per_1m_out_cached": 0,
2001      "context_window": 131072,
2002      "default_max_tokens": 13107,
2003      "can_reason": true,
2004      "reasoning_levels": [
2005        "low",
2006        "medium",
2007        "high"
2008      ],
2009      "default_reasoning_effort": "medium",
2010      "supports_attachments": false,
2011      "options": {}
2012    },
2013    {
2014      "id": "openai/gpt-oss-20b:free",
2015      "name": "OpenAI: gpt-oss-20b (free)",
2016      "cost_per_1m_in": 0,
2017      "cost_per_1m_out": 0,
2018      "cost_per_1m_in_cached": 0,
2019      "cost_per_1m_out_cached": 0,
2020      "context_window": 131072,
2021      "default_max_tokens": 65536,
2022      "can_reason": true,
2023      "reasoning_levels": [
2024        "low",
2025        "medium",
2026        "high"
2027      ],
2028      "default_reasoning_effort": "medium",
2029      "supports_attachments": false,
2030      "options": {}
2031    },
2032    {
2033      "id": "openai/gpt-oss-safeguard-20b",
2034      "name": "OpenAI: gpt-oss-safeguard-20b",
2035      "cost_per_1m_in": 0.075,
2036      "cost_per_1m_out": 0.3,
2037      "cost_per_1m_in_cached": 0,
2038      "cost_per_1m_out_cached": 0.037,
2039      "context_window": 131072,
2040      "default_max_tokens": 32768,
2041      "can_reason": true,
2042      "reasoning_levels": [
2043        "low",
2044        "medium",
2045        "high"
2046      ],
2047      "default_reasoning_effort": "medium",
2048      "supports_attachments": false,
2049      "options": {}
2050    },
2051    {
2052      "id": "openai/o1",
2053      "name": "OpenAI: o1",
2054      "cost_per_1m_in": 15,
2055      "cost_per_1m_out": 60,
2056      "cost_per_1m_in_cached": 0,
2057      "cost_per_1m_out_cached": 7.5,
2058      "context_window": 200000,
2059      "default_max_tokens": 50000,
2060      "can_reason": false,
2061      "supports_attachments": true,
2062      "options": {}
2063    },
2064    {
2065      "id": "openai/o3",
2066      "name": "OpenAI: o3",
2067      "cost_per_1m_in": 2,
2068      "cost_per_1m_out": 8,
2069      "cost_per_1m_in_cached": 0,
2070      "cost_per_1m_out_cached": 0.5,
2071      "context_window": 200000,
2072      "default_max_tokens": 50000,
2073      "can_reason": true,
2074      "reasoning_levels": [
2075        "low",
2076        "medium",
2077        "high"
2078      ],
2079      "default_reasoning_effort": "medium",
2080      "supports_attachments": true,
2081      "options": {}
2082    },
2083    {
2084      "id": "openai/o3-deep-research",
2085      "name": "OpenAI: o3 Deep Research",
2086      "cost_per_1m_in": 10,
2087      "cost_per_1m_out": 40,
2088      "cost_per_1m_in_cached": 0,
2089      "cost_per_1m_out_cached": 2.5,
2090      "context_window": 200000,
2091      "default_max_tokens": 50000,
2092      "can_reason": true,
2093      "reasoning_levels": [
2094        "low",
2095        "medium",
2096        "high"
2097      ],
2098      "default_reasoning_effort": "medium",
2099      "supports_attachments": true,
2100      "options": {}
2101    },
2102    {
2103      "id": "openai/o3-mini",
2104      "name": "OpenAI: o3 Mini",
2105      "cost_per_1m_in": 1.1,
2106      "cost_per_1m_out": 4.4,
2107      "cost_per_1m_in_cached": 0,
2108      "cost_per_1m_out_cached": 0.55,
2109      "context_window": 200000,
2110      "default_max_tokens": 50000,
2111      "can_reason": false,
2112      "supports_attachments": false,
2113      "options": {}
2114    },
2115    {
2116      "id": "openai/o3-mini-high",
2117      "name": "OpenAI: o3 Mini High",
2118      "cost_per_1m_in": 1.1,
2119      "cost_per_1m_out": 4.4,
2120      "cost_per_1m_in_cached": 0,
2121      "cost_per_1m_out_cached": 0.55,
2122      "context_window": 200000,
2123      "default_max_tokens": 50000,
2124      "can_reason": false,
2125      "supports_attachments": false,
2126      "options": {}
2127    },
2128    {
2129      "id": "openai/o3-pro",
2130      "name": "OpenAI: o3 Pro",
2131      "cost_per_1m_in": 20,
2132      "cost_per_1m_out": 80,
2133      "cost_per_1m_in_cached": 0,
2134      "cost_per_1m_out_cached": 0,
2135      "context_window": 200000,
2136      "default_max_tokens": 50000,
2137      "can_reason": true,
2138      "reasoning_levels": [
2139        "low",
2140        "medium",
2141        "high"
2142      ],
2143      "default_reasoning_effort": "medium",
2144      "supports_attachments": true,
2145      "options": {}
2146    },
2147    {
2148      "id": "openai/o4-mini",
2149      "name": "OpenAI: o4 Mini",
2150      "cost_per_1m_in": 1.1,
2151      "cost_per_1m_out": 4.4,
2152      "cost_per_1m_in_cached": 0,
2153      "cost_per_1m_out_cached": 0.275,
2154      "context_window": 200000,
2155      "default_max_tokens": 50000,
2156      "can_reason": true,
2157      "reasoning_levels": [
2158        "low",
2159        "medium",
2160        "high"
2161      ],
2162      "default_reasoning_effort": "medium",
2163      "supports_attachments": true,
2164      "options": {}
2165    },
2166    {
2167      "id": "openai/o4-mini-deep-research",
2168      "name": "OpenAI: o4 Mini Deep Research",
2169      "cost_per_1m_in": 2,
2170      "cost_per_1m_out": 8,
2171      "cost_per_1m_in_cached": 0,
2172      "cost_per_1m_out_cached": 0.5,
2173      "context_window": 200000,
2174      "default_max_tokens": 50000,
2175      "can_reason": true,
2176      "reasoning_levels": [
2177        "low",
2178        "medium",
2179        "high"
2180      ],
2181      "default_reasoning_effort": "medium",
2182      "supports_attachments": true,
2183      "options": {}
2184    },
2185    {
2186      "id": "openai/o4-mini-high",
2187      "name": "OpenAI: o4 Mini High",
2188      "cost_per_1m_in": 1.1,
2189      "cost_per_1m_out": 4.4,
2190      "cost_per_1m_in_cached": 0,
2191      "cost_per_1m_out_cached": 0.275,
2192      "context_window": 200000,
2193      "default_max_tokens": 50000,
2194      "can_reason": true,
2195      "reasoning_levels": [
2196        "low",
2197        "medium",
2198        "high"
2199      ],
2200      "default_reasoning_effort": "medium",
2201      "supports_attachments": true,
2202      "options": {}
2203    },
2204    {
2205      "id": "qwen/qwen-2.5-72b-instruct",
2206      "name": "Qwen2.5 72B Instruct",
2207      "cost_per_1m_in": 0.07,
2208      "cost_per_1m_out": 0.26,
2209      "cost_per_1m_in_cached": 0,
2210      "cost_per_1m_out_cached": 0,
2211      "context_window": 32768,
2212      "default_max_tokens": 16384,
2213      "can_reason": false,
2214      "supports_attachments": false,
2215      "options": {}
2216    },
2217    {
2218      "id": "qwen/qwen-plus-2025-07-28",
2219      "name": "Qwen: Qwen Plus 0728",
2220      "cost_per_1m_in": 0.4,
2221      "cost_per_1m_out": 1.2,
2222      "cost_per_1m_in_cached": 0,
2223      "cost_per_1m_out_cached": 0,
2224      "context_window": 1000000,
2225      "default_max_tokens": 16384,
2226      "can_reason": false,
2227      "supports_attachments": false,
2228      "options": {}
2229    },
2230    {
2231      "id": "qwen/qwen-plus-2025-07-28:thinking",
2232      "name": "Qwen: Qwen Plus 0728 (thinking)",
2233      "cost_per_1m_in": 0.4,
2234      "cost_per_1m_out": 4,
2235      "cost_per_1m_in_cached": 0,
2236      "cost_per_1m_out_cached": 0,
2237      "context_window": 1000000,
2238      "default_max_tokens": 16384,
2239      "can_reason": true,
2240      "reasoning_levels": [
2241        "low",
2242        "medium",
2243        "high"
2244      ],
2245      "default_reasoning_effort": "medium",
2246      "supports_attachments": false,
2247      "options": {}
2248    },
2249    {
2250      "id": "qwen/qwen-vl-max",
2251      "name": "Qwen: Qwen VL Max",
2252      "cost_per_1m_in": 0.8,
2253      "cost_per_1m_out": 3.2,
2254      "cost_per_1m_in_cached": 0,
2255      "cost_per_1m_out_cached": 0,
2256      "context_window": 131072,
2257      "default_max_tokens": 4096,
2258      "can_reason": false,
2259      "supports_attachments": true,
2260      "options": {}
2261    },
2262    {
2263      "id": "qwen/qwen-max",
2264      "name": "Qwen: Qwen-Max",
2265      "cost_per_1m_in": 1.6,
2266      "cost_per_1m_out": 6.4,
2267      "cost_per_1m_in_cached": 0,
2268      "cost_per_1m_out_cached": 0.64,
2269      "context_window": 32768,
2270      "default_max_tokens": 4096,
2271      "can_reason": false,
2272      "supports_attachments": false,
2273      "options": {}
2274    },
2275    {
2276      "id": "qwen/qwen-plus",
2277      "name": "Qwen: Qwen-Plus",
2278      "cost_per_1m_in": 0.4,
2279      "cost_per_1m_out": 1.2,
2280      "cost_per_1m_in_cached": 0,
2281      "cost_per_1m_out_cached": 0.16,
2282      "context_window": 131072,
2283      "default_max_tokens": 4096,
2284      "can_reason": false,
2285      "supports_attachments": false,
2286      "options": {}
2287    },
2288    {
2289      "id": "qwen/qwen-turbo",
2290      "name": "Qwen: Qwen-Turbo",
2291      "cost_per_1m_in": 0.05,
2292      "cost_per_1m_out": 0.2,
2293      "cost_per_1m_in_cached": 0,
2294      "cost_per_1m_out_cached": 0.02,
2295      "context_window": 1000000,
2296      "default_max_tokens": 4096,
2297      "can_reason": false,
2298      "supports_attachments": false,
2299      "options": {}
2300    },
2301    {
2302      "id": "qwen/qwen3-14b",
2303      "name": "Qwen: Qwen3 14B",
2304      "cost_per_1m_in": 0.05,
2305      "cost_per_1m_out": 0.22,
2306      "cost_per_1m_in_cached": 0,
2307      "cost_per_1m_out_cached": 0,
2308      "context_window": 40960,
2309      "default_max_tokens": 20480,
2310      "can_reason": true,
2311      "reasoning_levels": [
2312        "low",
2313        "medium",
2314        "high"
2315      ],
2316      "default_reasoning_effort": "medium",
2317      "supports_attachments": false,
2318      "options": {}
2319    },
2320    {
2321      "id": "qwen/qwen3-235b-a22b",
2322      "name": "Qwen: Qwen3 235B A22B",
2323      "cost_per_1m_in": 0.22,
2324      "cost_per_1m_out": 0.88,
2325      "cost_per_1m_in_cached": 0,
2326      "cost_per_1m_out_cached": 0,
2327      "context_window": 131072,
2328      "default_max_tokens": 13107,
2329      "can_reason": true,
2330      "reasoning_levels": [
2331        "low",
2332        "medium",
2333        "high"
2334      ],
2335      "default_reasoning_effort": "medium",
2336      "supports_attachments": false,
2337      "options": {}
2338    },
2339    {
2340      "id": "qwen/qwen3-235b-a22b:free",
2341      "name": "Qwen: Qwen3 235B A22B (free)",
2342      "cost_per_1m_in": 0,
2343      "cost_per_1m_out": 0,
2344      "cost_per_1m_in_cached": 0,
2345      "cost_per_1m_out_cached": 0,
2346      "context_window": 131072,
2347      "default_max_tokens": 13107,
2348      "can_reason": true,
2349      "reasoning_levels": [
2350        "low",
2351        "medium",
2352        "high"
2353      ],
2354      "default_reasoning_effort": "medium",
2355      "supports_attachments": false,
2356      "options": {}
2357    },
2358    {
2359      "id": "qwen/qwen3-235b-a22b-2507",
2360      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2361      "cost_per_1m_in": 0.22,
2362      "cost_per_1m_out": 0.8,
2363      "cost_per_1m_in_cached": 0,
2364      "cost_per_1m_out_cached": 0,
2365      "context_window": 262144,
2366      "default_max_tokens": 131072,
2367      "can_reason": true,
2368      "reasoning_levels": [
2369        "low",
2370        "medium",
2371        "high"
2372      ],
2373      "default_reasoning_effort": "medium",
2374      "supports_attachments": false,
2375      "options": {}
2376    },
2377    {
2378      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2379      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2380      "cost_per_1m_in": 0.11,
2381      "cost_per_1m_out": 0.6,
2382      "cost_per_1m_in_cached": 0,
2383      "cost_per_1m_out_cached": 0,
2384      "context_window": 262144,
2385      "default_max_tokens": 131072,
2386      "can_reason": true,
2387      "reasoning_levels": [
2388        "low",
2389        "medium",
2390        "high"
2391      ],
2392      "default_reasoning_effort": "medium",
2393      "supports_attachments": false,
2394      "options": {}
2395    },
2396    {
2397      "id": "qwen/qwen3-30b-a3b",
2398      "name": "Qwen: Qwen3 30B A3B",
2399      "cost_per_1m_in": 0.08,
2400      "cost_per_1m_out": 0.28,
2401      "cost_per_1m_in_cached": 0,
2402      "cost_per_1m_out_cached": 0,
2403      "context_window": 131072,
2404      "default_max_tokens": 65536,
2405      "can_reason": true,
2406      "reasoning_levels": [
2407        "low",
2408        "medium",
2409        "high"
2410      ],
2411      "default_reasoning_effort": "medium",
2412      "supports_attachments": false,
2413      "options": {}
2414    },
2415    {
2416      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2417      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2418      "cost_per_1m_in": 0.1,
2419      "cost_per_1m_out": 0.3,
2420      "cost_per_1m_in_cached": 0,
2421      "cost_per_1m_out_cached": 0,
2422      "context_window": 262144,
2423      "default_max_tokens": 26214,
2424      "can_reason": false,
2425      "supports_attachments": false,
2426      "options": {}
2427    },
2428    {
2429      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2430      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2431      "cost_per_1m_in": 0.1,
2432      "cost_per_1m_out": 0.3,
2433      "cost_per_1m_in_cached": 0,
2434      "cost_per_1m_out_cached": 0,
2435      "context_window": 262144,
2436      "default_max_tokens": 26214,
2437      "can_reason": true,
2438      "reasoning_levels": [
2439        "low",
2440        "medium",
2441        "high"
2442      ],
2443      "default_reasoning_effort": "medium",
2444      "supports_attachments": false,
2445      "options": {}
2446    },
2447    {
2448      "id": "qwen/qwen3-32b",
2449      "name": "Qwen: Qwen3 32B",
2450      "cost_per_1m_in": 0.15,
2451      "cost_per_1m_out": 0.5,
2452      "cost_per_1m_in_cached": 0,
2453      "cost_per_1m_out_cached": 0,
2454      "context_window": 131072,
2455      "default_max_tokens": 4000,
2456      "can_reason": true,
2457      "reasoning_levels": [
2458        "low",
2459        "medium",
2460        "high"
2461      ],
2462      "default_reasoning_effort": "medium",
2463      "supports_attachments": false,
2464      "options": {}
2465    },
2466    {
2467      "id": "qwen/qwen3-4b:free",
2468      "name": "Qwen: Qwen3 4B (free)",
2469      "cost_per_1m_in": 0,
2470      "cost_per_1m_out": 0,
2471      "cost_per_1m_in_cached": 0,
2472      "cost_per_1m_out_cached": 0,
2473      "context_window": 40960,
2474      "default_max_tokens": 4096,
2475      "can_reason": true,
2476      "reasoning_levels": [
2477        "low",
2478        "medium",
2479        "high"
2480      ],
2481      "default_reasoning_effort": "medium",
2482      "supports_attachments": false,
2483      "options": {}
2484    },
2485    {
2486      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2487      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2488      "cost_per_1m_in": 0.06,
2489      "cost_per_1m_out": 0.25,
2490      "cost_per_1m_in_cached": 0,
2491      "cost_per_1m_out_cached": 0,
2492      "context_window": 262144,
2493      "default_max_tokens": 131072,
2494      "can_reason": false,
2495      "supports_attachments": false,
2496      "options": {}
2497    },
2498    {
2499      "id": "qwen/qwen3-coder",
2500      "name": "Qwen: Qwen3 Coder 480B A35B",
2501      "cost_per_1m_in": 0.29,
2502      "cost_per_1m_out": 1.2,
2503      "cost_per_1m_in_cached": 0,
2504      "cost_per_1m_out_cached": 0,
2505      "context_window": 262144,
2506      "default_max_tokens": 32768,
2507      "can_reason": false,
2508      "supports_attachments": false,
2509      "options": {}
2510    },
2511    {
2512      "id": "qwen/qwen3-coder:exacto",
2513      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2514      "cost_per_1m_in": 1,
2515      "cost_per_1m_out": 4,
2516      "cost_per_1m_in_cached": 0,
2517      "cost_per_1m_out_cached": 0,
2518      "context_window": 262144,
2519      "default_max_tokens": 16384,
2520      "can_reason": false,
2521      "supports_attachments": false,
2522      "options": {}
2523    },
2524    {
2525      "id": "qwen/qwen3-coder:free",
2526      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2527      "cost_per_1m_in": 0,
2528      "cost_per_1m_out": 0,
2529      "cost_per_1m_in_cached": 0,
2530      "cost_per_1m_out_cached": 0,
2531      "context_window": 262144,
2532      "default_max_tokens": 26214,
2533      "can_reason": false,
2534      "supports_attachments": false,
2535      "options": {}
2536    },
2537    {
2538      "id": "qwen/qwen3-coder-flash",
2539      "name": "Qwen: Qwen3 Coder Flash",
2540      "cost_per_1m_in": 0.3,
2541      "cost_per_1m_out": 1.5,
2542      "cost_per_1m_in_cached": 0,
2543      "cost_per_1m_out_cached": 0.08,
2544      "context_window": 128000,
2545      "default_max_tokens": 32768,
2546      "can_reason": false,
2547      "supports_attachments": false,
2548      "options": {}
2549    },
2550    {
2551      "id": "qwen/qwen3-coder-plus",
2552      "name": "Qwen: Qwen3 Coder Plus",
2553      "cost_per_1m_in": 1,
2554      "cost_per_1m_out": 5,
2555      "cost_per_1m_in_cached": 0,
2556      "cost_per_1m_out_cached": 0.1,
2557      "context_window": 128000,
2558      "default_max_tokens": 32768,
2559      "can_reason": false,
2560      "supports_attachments": false,
2561      "options": {}
2562    },
2563    {
2564      "id": "qwen/qwen3-max",
2565      "name": "Qwen: Qwen3 Max",
2566      "cost_per_1m_in": 1.2,
2567      "cost_per_1m_out": 6,
2568      "cost_per_1m_in_cached": 0,
2569      "cost_per_1m_out_cached": 0.24,
2570      "context_window": 256000,
2571      "default_max_tokens": 16384,
2572      "can_reason": false,
2573      "supports_attachments": false,
2574      "options": {}
2575    },
2576    {
2577      "id": "qwen/qwen3-next-80b-a3b-instruct",
2578      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2579      "cost_per_1m_in": 0.15,
2580      "cost_per_1m_out": 1.5,
2581      "cost_per_1m_in_cached": 0,
2582      "cost_per_1m_out_cached": 0,
2583      "context_window": 262144,
2584      "default_max_tokens": 131072,
2585      "can_reason": false,
2586      "supports_attachments": false,
2587      "options": {}
2588    },
2589    {
2590      "id": "qwen/qwen3-next-80b-a3b-thinking",
2591      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2592      "cost_per_1m_in": 0.15,
2593      "cost_per_1m_out": 1.2,
2594      "cost_per_1m_in_cached": 0,
2595      "cost_per_1m_out_cached": 0,
2596      "context_window": 262144,
2597      "default_max_tokens": 131072,
2598      "can_reason": true,
2599      "reasoning_levels": [
2600        "low",
2601        "medium",
2602        "high"
2603      ],
2604      "default_reasoning_effort": "medium",
2605      "supports_attachments": false,
2606      "options": {}
2607    },
2608    {
2609      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2610      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2611      "cost_per_1m_in": 0.22,
2612      "cost_per_1m_out": 0.88,
2613      "cost_per_1m_in_cached": 0,
2614      "cost_per_1m_out_cached": 0,
2615      "context_window": 262144,
2616      "default_max_tokens": 26214,
2617      "can_reason": false,
2618      "supports_attachments": true,
2619      "options": {}
2620    },
2621    {
2622      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2623      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2624      "cost_per_1m_in": 0.3,
2625      "cost_per_1m_out": 1.2,
2626      "cost_per_1m_in_cached": 0,
2627      "cost_per_1m_out_cached": 0,
2628      "context_window": 262144,
2629      "default_max_tokens": 131072,
2630      "can_reason": true,
2631      "reasoning_levels": [
2632        "low",
2633        "medium",
2634        "high"
2635      ],
2636      "default_reasoning_effort": "medium",
2637      "supports_attachments": true,
2638      "options": {}
2639    },
2640    {
2641      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2642      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2643      "cost_per_1m_in": 0.29,
2644      "cost_per_1m_out": 1,
2645      "cost_per_1m_in_cached": 0,
2646      "cost_per_1m_out_cached": 0,
2647      "context_window": 262144,
2648      "default_max_tokens": 131072,
2649      "can_reason": false,
2650      "supports_attachments": true,
2651      "options": {}
2652    },
2653    {
2654      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2655      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2656      "cost_per_1m_in": 0.2,
2657      "cost_per_1m_out": 1,
2658      "cost_per_1m_in_cached": 0,
2659      "cost_per_1m_out_cached": 0,
2660      "context_window": 131072,
2661      "default_max_tokens": 16384,
2662      "can_reason": true,
2663      "reasoning_levels": [
2664        "low",
2665        "medium",
2666        "high"
2667      ],
2668      "default_reasoning_effort": "medium",
2669      "supports_attachments": true,
2670      "options": {}
2671    },
2672    {
2673      "id": "qwen/qwen3-vl-8b-instruct",
2674      "name": "Qwen: Qwen3 VL 8B Instruct",
2675      "cost_per_1m_in": 0.18,
2676      "cost_per_1m_out": 0.7,
2677      "cost_per_1m_in_cached": 0,
2678      "cost_per_1m_out_cached": 0,
2679      "context_window": 256000,
2680      "default_max_tokens": 16384,
2681      "can_reason": false,
2682      "supports_attachments": true,
2683      "options": {}
2684    },
2685    {
2686      "id": "qwen/qwen3-vl-8b-thinking",
2687      "name": "Qwen: Qwen3 VL 8B Thinking",
2688      "cost_per_1m_in": 0.18,
2689      "cost_per_1m_out": 2.1,
2690      "cost_per_1m_in_cached": 0,
2691      "cost_per_1m_out_cached": 0,
2692      "context_window": 256000,
2693      "default_max_tokens": 16384,
2694      "can_reason": true,
2695      "reasoning_levels": [
2696        "low",
2697        "medium",
2698        "high"
2699      ],
2700      "default_reasoning_effort": "medium",
2701      "supports_attachments": true,
2702      "options": {}
2703    },
2704    {
2705      "id": "openrouter/sherlock-dash-alpha",
2706      "name": "Sherlock Dash Alpha",
2707      "cost_per_1m_in": 0,
2708      "cost_per_1m_out": 0,
2709      "cost_per_1m_in_cached": 0,
2710      "cost_per_1m_out_cached": 0,
2711      "context_window": 1840000,
2712      "default_max_tokens": 32000,
2713      "can_reason": false,
2714      "supports_attachments": true,
2715      "options": {}
2716    },
2717    {
2718      "id": "openrouter/sherlock-think-alpha",
2719      "name": "Sherlock Think Alpha",
2720      "cost_per_1m_in": 0,
2721      "cost_per_1m_out": 0,
2722      "cost_per_1m_in_cached": 0,
2723      "cost_per_1m_out_cached": 0,
2724      "context_window": 1840000,
2725      "default_max_tokens": 32000,
2726      "can_reason": true,
2727      "reasoning_levels": [
2728        "low",
2729        "medium",
2730        "high"
2731      ],
2732      "default_reasoning_effort": "medium",
2733      "supports_attachments": true,
2734      "options": {}
2735    },
2736    {
2737      "id": "stepfun-ai/step3",
2738      "name": "StepFun: Step3",
2739      "cost_per_1m_in": 0.57,
2740      "cost_per_1m_out": 1.42,
2741      "cost_per_1m_in_cached": 0,
2742      "cost_per_1m_out_cached": 0,
2743      "context_window": 65536,
2744      "default_max_tokens": 32768,
2745      "can_reason": true,
2746      "reasoning_levels": [
2747        "low",
2748        "medium",
2749        "high"
2750      ],
2751      "default_reasoning_effort": "medium",
2752      "supports_attachments": true,
2753      "options": {}
2754    },
2755    {
2756      "id": "tngtech/deepseek-r1t2-chimera",
2757      "name": "TNG: DeepSeek R1T2 Chimera",
2758      "cost_per_1m_in": 0.3,
2759      "cost_per_1m_out": 1.2,
2760      "cost_per_1m_in_cached": 0,
2761      "cost_per_1m_out_cached": 0,
2762      "context_window": 163840,
2763      "default_max_tokens": 81920,
2764      "can_reason": true,
2765      "reasoning_levels": [
2766        "low",
2767        "medium",
2768        "high"
2769      ],
2770      "default_reasoning_effort": "medium",
2771      "supports_attachments": false,
2772      "options": {}
2773    },
2774    {
2775      "id": "thedrummer/rocinante-12b",
2776      "name": "TheDrummer: Rocinante 12B",
2777      "cost_per_1m_in": 0.17,
2778      "cost_per_1m_out": 0.43,
2779      "cost_per_1m_in_cached": 0,
2780      "cost_per_1m_out_cached": 0,
2781      "context_window": 32768,
2782      "default_max_tokens": 3276,
2783      "can_reason": false,
2784      "supports_attachments": false,
2785      "options": {}
2786    },
2787    {
2788      "id": "thedrummer/unslopnemo-12b",
2789      "name": "TheDrummer: UnslopNemo 12B",
2790      "cost_per_1m_in": 0.4,
2791      "cost_per_1m_out": 0.4,
2792      "cost_per_1m_in_cached": 0,
2793      "cost_per_1m_out_cached": 0,
2794      "context_window": 32768,
2795      "default_max_tokens": 3276,
2796      "can_reason": false,
2797      "supports_attachments": false,
2798      "options": {}
2799    },
2800    {
2801      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2802      "name": "Tongyi DeepResearch 30B A3B",
2803      "cost_per_1m_in": 0.09,
2804      "cost_per_1m_out": 0.4,
2805      "cost_per_1m_in_cached": 0,
2806      "cost_per_1m_out_cached": 0,
2807      "context_window": 131072,
2808      "default_max_tokens": 65536,
2809      "can_reason": true,
2810      "reasoning_levels": [
2811        "low",
2812        "medium",
2813        "high"
2814      ],
2815      "default_reasoning_effort": "medium",
2816      "supports_attachments": false,
2817      "options": {}
2818    },
2819    {
2820      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2821      "name": "Tongyi DeepResearch 30B A3B (free)",
2822      "cost_per_1m_in": 0,
2823      "cost_per_1m_out": 0,
2824      "cost_per_1m_in_cached": 0,
2825      "cost_per_1m_out_cached": 0,
2826      "context_window": 131072,
2827      "default_max_tokens": 65536,
2828      "can_reason": true,
2829      "reasoning_levels": [
2830        "low",
2831        "medium",
2832        "high"
2833      ],
2834      "default_reasoning_effort": "medium",
2835      "supports_attachments": false,
2836      "options": {}
2837    },
2838    {
2839      "id": "z-ai/glm-4-32b",
2840      "name": "Z.AI: GLM 4 32B",
2841      "cost_per_1m_in": 0.1,
2842      "cost_per_1m_out": 0.1,
2843      "cost_per_1m_in_cached": 0,
2844      "cost_per_1m_out_cached": 0,
2845      "context_window": 128000,
2846      "default_max_tokens": 12800,
2847      "can_reason": false,
2848      "supports_attachments": false,
2849      "options": {}
2850    },
2851    {
2852      "id": "z-ai/glm-4.5",
2853      "name": "Z.AI: GLM 4.5",
2854      "cost_per_1m_in": 0.35,
2855      "cost_per_1m_out": 1.5,
2856      "cost_per_1m_in_cached": 0,
2857      "cost_per_1m_out_cached": 0,
2858      "context_window": 131072,
2859      "default_max_tokens": 65536,
2860      "can_reason": true,
2861      "reasoning_levels": [
2862        "low",
2863        "medium",
2864        "high"
2865      ],
2866      "default_reasoning_effort": "medium",
2867      "supports_attachments": false,
2868      "options": {}
2869    },
2870    {
2871      "id": "z-ai/glm-4.5-air",
2872      "name": "Z.AI: GLM 4.5 Air",
2873      "cost_per_1m_in": 0.14,
2874      "cost_per_1m_out": 0.86,
2875      "cost_per_1m_in_cached": 0,
2876      "cost_per_1m_out_cached": 0,
2877      "context_window": 131072,
2878      "default_max_tokens": 65536,
2879      "can_reason": true,
2880      "reasoning_levels": [
2881        "low",
2882        "medium",
2883        "high"
2884      ],
2885      "default_reasoning_effort": "medium",
2886      "supports_attachments": false,
2887      "options": {}
2888    },
2889    {
2890      "id": "z-ai/glm-4.5-air:free",
2891      "name": "Z.AI: GLM 4.5 Air (free)",
2892      "cost_per_1m_in": 0,
2893      "cost_per_1m_out": 0,
2894      "cost_per_1m_in_cached": 0,
2895      "cost_per_1m_out_cached": 0,
2896      "context_window": 131072,
2897      "default_max_tokens": 48000,
2898      "can_reason": true,
2899      "reasoning_levels": [
2900        "low",
2901        "medium",
2902        "high"
2903      ],
2904      "default_reasoning_effort": "medium",
2905      "supports_attachments": false,
2906      "options": {}
2907    },
2908    {
2909      "id": "z-ai/glm-4.5v",
2910      "name": "Z.AI: GLM 4.5V",
2911      "cost_per_1m_in": 0.6,
2912      "cost_per_1m_out": 1.8,
2913      "cost_per_1m_in_cached": 0,
2914      "cost_per_1m_out_cached": 0.11,
2915      "context_window": 65536,
2916      "default_max_tokens": 8192,
2917      "can_reason": true,
2918      "reasoning_levels": [
2919        "low",
2920        "medium",
2921        "high"
2922      ],
2923      "default_reasoning_effort": "medium",
2924      "supports_attachments": true,
2925      "options": {}
2926    },
2927    {
2928      "id": "z-ai/glm-4.6",
2929      "name": "Z.AI: GLM 4.6",
2930      "cost_per_1m_in": 0.6,
2931      "cost_per_1m_out": 2.2,
2932      "cost_per_1m_in_cached": 0,
2933      "cost_per_1m_out_cached": 0.11,
2934      "context_window": 204800,
2935      "default_max_tokens": 65536,
2936      "can_reason": true,
2937      "reasoning_levels": [
2938        "low",
2939        "medium",
2940        "high"
2941      ],
2942      "default_reasoning_effort": "medium",
2943      "supports_attachments": false,
2944      "options": {}
2945    },
2946    {
2947      "id": "z-ai/glm-4.6:exacto",
2948      "name": "Z.AI: GLM 4.6 (exacto)",
2949      "cost_per_1m_in": 0.6,
2950      "cost_per_1m_out": 2.2,
2951      "cost_per_1m_in_cached": 0,
2952      "cost_per_1m_out_cached": 0,
2953      "context_window": 204800,
2954      "default_max_tokens": 65536,
2955      "can_reason": true,
2956      "reasoning_levels": [
2957        "low",
2958        "medium",
2959        "high"
2960      ],
2961      "default_reasoning_effort": "medium",
2962      "supports_attachments": false,
2963      "options": {}
2964    },
2965    {
2966      "id": "x-ai/grok-3",
2967      "name": "xAI: Grok 3",
2968      "cost_per_1m_in": 5,
2969      "cost_per_1m_out": 25,
2970      "cost_per_1m_in_cached": 0,
2971      "cost_per_1m_out_cached": 1.25,
2972      "context_window": 131072,
2973      "default_max_tokens": 13107,
2974      "can_reason": false,
2975      "supports_attachments": false,
2976      "options": {}
2977    },
2978    {
2979      "id": "x-ai/grok-3-beta",
2980      "name": "xAI: Grok 3 Beta",
2981      "cost_per_1m_in": 5,
2982      "cost_per_1m_out": 25,
2983      "cost_per_1m_in_cached": 0,
2984      "cost_per_1m_out_cached": 1.25,
2985      "context_window": 131072,
2986      "default_max_tokens": 13107,
2987      "can_reason": false,
2988      "supports_attachments": false,
2989      "options": {}
2990    },
2991    {
2992      "id": "x-ai/grok-3-mini",
2993      "name": "xAI: Grok 3 Mini",
2994      "cost_per_1m_in": 0.3,
2995      "cost_per_1m_out": 0.5,
2996      "cost_per_1m_in_cached": 0,
2997      "cost_per_1m_out_cached": 0.075,
2998      "context_window": 131072,
2999      "default_max_tokens": 13107,
3000      "can_reason": true,
3001      "reasoning_levels": [
3002        "low",
3003        "medium",
3004        "high"
3005      ],
3006      "default_reasoning_effort": "medium",
3007      "supports_attachments": false,
3008      "options": {}
3009    },
3010    {
3011      "id": "x-ai/grok-3-mini-beta",
3012      "name": "xAI: Grok 3 Mini Beta",
3013      "cost_per_1m_in": 0.3,
3014      "cost_per_1m_out": 0.5,
3015      "cost_per_1m_in_cached": 0,
3016      "cost_per_1m_out_cached": 0.075,
3017      "context_window": 131072,
3018      "default_max_tokens": 13107,
3019      "can_reason": true,
3020      "reasoning_levels": [
3021        "low",
3022        "medium",
3023        "high"
3024      ],
3025      "default_reasoning_effort": "medium",
3026      "supports_attachments": false,
3027      "options": {}
3028    },
3029    {
3030      "id": "x-ai/grok-4",
3031      "name": "xAI: Grok 4",
3032      "cost_per_1m_in": 3,
3033      "cost_per_1m_out": 15,
3034      "cost_per_1m_in_cached": 0,
3035      "cost_per_1m_out_cached": 0.75,
3036      "context_window": 256000,
3037      "default_max_tokens": 25600,
3038      "can_reason": true,
3039      "reasoning_levels": [
3040        "low",
3041        "medium",
3042        "high"
3043      ],
3044      "default_reasoning_effort": "medium",
3045      "supports_attachments": true,
3046      "options": {}
3047    },
3048    {
3049      "id": "x-ai/grok-4-fast",
3050      "name": "xAI: Grok 4 Fast",
3051      "cost_per_1m_in": 0.2,
3052      "cost_per_1m_out": 0.5,
3053      "cost_per_1m_in_cached": 0,
3054      "cost_per_1m_out_cached": 0.05,
3055      "context_window": 2000000,
3056      "default_max_tokens": 15000,
3057      "can_reason": true,
3058      "reasoning_levels": [
3059        "low",
3060        "medium",
3061        "high"
3062      ],
3063      "default_reasoning_effort": "medium",
3064      "supports_attachments": true,
3065      "options": {}
3066    },
3067    {
3068      "id": "x-ai/grok-code-fast-1",
3069      "name": "xAI: Grok Code Fast 1",
3070      "cost_per_1m_in": 0.2,
3071      "cost_per_1m_out": 1.5,
3072      "cost_per_1m_in_cached": 0,
3073      "cost_per_1m_out_cached": 0.02,
3074      "context_window": 256000,
3075      "default_max_tokens": 5000,
3076      "can_reason": true,
3077      "reasoning_levels": [
3078        "low",
3079        "medium",
3080        "high"
3081      ],
3082      "default_reasoning_effort": "medium",
3083      "supports_attachments": false,
3084      "options": {}
3085    }
3086  ],
3087  "default_headers": {
3088    "HTTP-Referer": "https://charm.land",
3089    "X-Title": "Crush"
3090  }
3091}