openrouter.json

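Every entry in "models" repeats the same set of fields. As a reading aid, that shape is summarized in the TypeScript sketch below; the field names and the "low"/"medium"/"high" values are taken from the file itself, while the type names (ProviderConfig, ModelEntry, ReasoningEffort) are illustrative only and not part of any published OpenRouter schema.

type ReasoningEffort = "low" | "medium" | "high";

interface ModelEntry {
  id: string;                          // OpenRouter model slug, e.g. "anthropic/claude-sonnet-4"
  name: string;                        // display name
  cost_per_1m_in: number;              // price per 1M input tokens
  cost_per_1m_out: number;             // price per 1M output tokens
  cost_per_1m_in_cached: number;       // price per 1M cached input tokens (0 where not priced)
  cost_per_1m_out_cached: number;      // price per 1M cached output tokens (0 where not priced)
  context_window: number;              // context size in tokens
  default_max_tokens: number;          // default completion budget in tokens
  can_reason: boolean;
  reasoning_levels?: ReasoningEffort[];        // in this file, present only when can_reason is true
  default_reasoning_effort?: ReasoningEffort;  // in this file, present only when can_reason is true
  supports_attachments: boolean;
  options: Record<string, unknown>;    // empty ({}) for every entry in this file
}

interface ProviderConfig {
  name: string;
  id: string;
  api_key: string;                     // here "$OPENROUTER_API_KEY", an environment-variable reference
  api_endpoint: string;
  type: string;
  default_large_model_id: string;
  default_small_model_id: string;
  models: ModelEntry[];
}
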
{
  "name": "OpenRouter",
  "id": "openrouter",
  "api_key": "$OPENROUTER_API_KEY",
  "api_endpoint": "https://openrouter.ai/api/v1",
  "type": "openrouter",
  "default_large_model_id": "anthropic/claude-sonnet-4",
  "default_small_model_id": "anthropic/claude-3.5-haiku",
  "models": [
    {
      "id": "ai21/jamba-large-1.7",
      "name": "AI21: Jamba Large 1.7",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "ai21/jamba-mini-1.7",
      "name": "AI21: Jamba Mini 1.7",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "allenai/olmo-3-7b-instruct",
      "name": "AllenAI: Olmo 3 7B Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "amazon/nova-lite-v1",
      "name": "Amazon: Nova Lite 1.0",
      "cost_per_1m_in": 0.06,
      "cost_per_1m_out": 0.24,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-micro-v1",
      "name": "Amazon: Nova Micro 1.0",
      "cost_per_1m_in": 0.035,
      "cost_per_1m_out": 0.14,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "amazon/nova-premier-v1",
      "name": "Amazon: Nova Premier 1.0",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 12.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.625,
      "context_window": 1000000,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-pro-v1",
      "name": "Amazon: Nova Pro 1.0",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 3.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3-haiku",
      "name": "Anthropic: Claude 3 Haiku",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1.25,
      "cost_per_1m_in_cached": 0.3,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3-opus",
      "name": "Anthropic: Claude 3 Opus",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-haiku",
      "name": "Anthropic: Claude 3.5 Haiku",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-haiku-20241022",
      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-sonnet",
      "name": "Anthropic: Claude 3.5 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.7-sonnet",
      "name": "Anthropic: Claude 3.7 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.7-sonnet:thinking",
      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-haiku-4.5",
      "name": "Anthropic: Claude Haiku 4.5",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 1.25,
      "cost_per_1m_out_cached": 0.1,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4",
      "name": "Anthropic: Claude Opus 4",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4.1",
      "name": "Anthropic: Claude Opus 4.1",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4.5",
      "name": "Anthropic: Claude Opus 4.5",
      "cost_per_1m_in": 5,
      "cost_per_1m_out": 25,
      "cost_per_1m_in_cached": 6.25,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-sonnet-4",
      "name": "Anthropic: Claude Sonnet 4",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-sonnet-4.5",
      "name": "Anthropic: Claude Sonnet 4.5",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "arcee-ai/virtuoso-large",
      "name": "Arcee AI: Virtuoso Large",
      "cost_per_1m_in": 0.75,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "baidu/ernie-4.5-21b-a3b",
      "name": "Baidu: ERNIE 4.5 21B A3B",
      "cost_per_1m_in": 0.056,
      "cost_per_1m_out": 0.224,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 120000,
      "default_max_tokens": 4000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "baidu/ernie-4.5-vl-28b-a3b",
      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
      "cost_per_1m_in": 0.112,
      "cost_per_1m_out": 0.448,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 30000,
      "default_max_tokens": 4000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
      "name": "Cogito V2 Preview Llama 109B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.59,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32767,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "cohere/command-r-08-2024",
      "name": "Cohere: Command R (08-2024)",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "cohere/command-r-plus-08-2024",
      "name": "Cohere: Command R+ (08-2024)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-405b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
      "cost_per_1m_in": 3.5,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-70b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
      "cost_per_1m_in": 0.88,
      "cost_per_1m_out": 0.88,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat",
      "name": "DeepSeek: DeepSeek V3",
      "cost_per_1m_in": 0.32,
      "cost_per_1m_out": 1.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 64000,
      "default_max_tokens": 8000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3-0324",
      "name": "DeepSeek: DeepSeek V3 0324",
      "cost_per_1m_in": 0.77,
      "cost_per_1m_out": 0.77,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3.1",
      "name": "DeepSeek: DeepSeek V3.1",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus",
      "name": "DeepSeek: DeepSeek V3.1 Terminus",
      "cost_per_1m_in": 0.23,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus:exacto",
      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.2-exp",
      "name": "DeepSeek: DeepSeek V3.2 Exp",
      "cost_per_1m_in": 0.216,
      "cost_per_1m_out": 0.328,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1",
      "name": "DeepSeek: R1",
      "cost_per_1m_in": 0.7,
      "cost_per_1m_out": 2.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1-0528",
      "name": "DeepSeek: R1 0528",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 1.75,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1-distill-llama-70b",
      "name": "DeepSeek: R1 Distill Llama 70B",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.13,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-001",
      "name": "Google: Gemini 2.0 Flash",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-exp:free",
      "name": "Google: Gemini 2.0 Flash Experimental (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-lite-001",
      "name": "Google: Gemini 2.0 Flash Lite",
      "cost_per_1m_in": 0.075,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash",
      "name": "Google: Gemini 2.5 Flash",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-lite",
      "name": "Google: Gemini 2.5 Flash Lite",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro",
      "name": "Google: Gemini 2.5 Pro",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro-preview-05-06",
      "name": "Google: Gemini 2.5 Pro Preview 05-06",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro-preview",
      "name": "Google: Gemini 2.5 Pro Preview 06-05",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-3-pro-preview",
      "name": "Google: Gemini 3 Pro Preview",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 12,
      "cost_per_1m_in_cached": 2.375,
      "cost_per_1m_out_cached": 0.2,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemma-3-27b-it",
      "name": "Google: Gemma 3 27B",
      "cost_per_1m_in": 0.13,
      "cost_per_1m_out": 0.52,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 96000,
      "default_max_tokens": 48000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "inception/mercury",
      "name": "Inception: Mercury",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "inception/mercury-coder",
      "name": "Inception: Mercury Coder",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "kwaipilot/kat-coder-pro:free",
      "name": "Kwaipilot: KAT-Coder-Pro V1 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meituan/longcat-flash-chat:free",
      "name": "Meituan: LongCat Flash Chat (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-405b-instruct",
      "name": "Meta: Llama 3.1 405B Instruct",
      "cost_per_1m_in": 3.5,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 130815,
      "default_max_tokens": 13081,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-70b-instruct",
      "name": "Meta: Llama 3.1 70B Instruct",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-8b-instruct",
      "name": "Meta: Llama 3.1 8B Instruct",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.05,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.2-3b-instruct",
      "name": "Meta: Llama 3.2 3B Instruct",
      "cost_per_1m_in": 0.024,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct",
      "name": "Meta: Llama 3.3 70B Instruct",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.75,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct:free",
      "name": "Meta: Llama 3.3 70B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-4-maverick",
      "name": "Meta: Llama 4 Maverick",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 0.85,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "meta-llama/llama-4-scout",
      "name": "Meta: Llama 4 Scout",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.7,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1310720,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "microsoft/phi-3-medium-128k-instruct",
      "name": "Microsoft: Phi-3 Medium 128K Instruct",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "microsoft/phi-3-mini-128k-instruct",
      "name": "Microsoft: Phi-3 Mini 128K Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "microsoft/phi-3.5-mini-128k-instruct",
      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "minimax/minimax-m2",
      "name": "MiniMax: MiniMax M2",
      "cost_per_1m_in": 0.255,
      "cost_per_1m_out": 1.02,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 204800,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large",
      "name": "Mistral Large",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2407",
      "name": "Mistral Large 2407",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2411",
      "name": "Mistral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small",
      "name": "Mistral Small",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-tiny",
      "name": "Mistral Tiny",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.25,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/codestral-2501",
      "name": "Mistral: Codestral 2501",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/codestral-2508",
      "name": "Mistral: Codestral 2508",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/devstral-medium",
      "name": "Mistral: Devstral Medium",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/devstral-small",
      "name": "Mistral: Devstral Small 1.1",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-medium-2506",
      "name": "Mistral: Magistral Medium 2506",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-medium-2506:thinking",
      "name": "Mistral: Magistral Medium 2506 (thinking)",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-small-2506",
      "name": "Mistral: Magistral Small 2506",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40000,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/ministral-3b",
      "name": "Mistral: Ministral 3B",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/ministral-8b",
      "name": "Mistral: Ministral 8B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-7b-instruct",
      "name": "Mistral: Mistral 7B Instruct",
      "cost_per_1m_in": 0.028,
      "cost_per_1m_out": 0.054,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-7b-instruct:free",
      "name": "Mistral: Mistral 7B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-medium-3",
      "name": "Mistral: Mistral Medium 3",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-medium-3.1",
      "name": "Mistral: Mistral Medium 3.1",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-nemo",
      "name": "Mistral: Mistral Nemo",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-24b-instruct-2501",
      "name": "Mistral: Mistral Small 3",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct",
      "name": "Mistral: Mistral Small 3.1 24B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
      "name": "Mistral: Mistral Small 3.1 24B (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.2-24b-instruct",
      "name": "Mistral: Mistral Small 3.2 24B",
      "cost_per_1m_in": 0.06,
      "cost_per_1m_out": 0.18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mixtral-8x22b-instruct",
      "name": "Mistral: Mixtral 8x22B Instruct",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 6553,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mixtral-8x7b-instruct",
      "name": "Mistral: Mixtral 8x7B Instruct",
      "cost_per_1m_in": 0.54,
      "cost_per_1m_out": 0.54,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/pixtral-large-2411",
      "name": "Mistral: Pixtral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-saba",
      "name": "Mistral: Saba",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/voxtral-small-24b-2507",
      "name": "Mistral: Voxtral Small 24B 2507",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32000,
      "default_max_tokens": 3200,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2",
      "name": "MoonshotAI: Kimi K2 0711",
      "cost_per_1m_in": 0.7,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-0905",
      "name": "MoonshotAI: Kimi K2 0905",
      "cost_per_1m_in": 0.39,
      "cost_per_1m_out": 1.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 131072,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-0905:exacto",
      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-thinking",
      "name": "MoonshotAI: Kimi K2 Thinking",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 131072,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
      "cost_per_1m_in": 1.2,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2",
      "name": "NVIDIA: Nemotron Nano 9B V2",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.16,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2:free",
      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/deephermes-3-mistral-24b-preview",
      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
      "cost_per_1m_in": 0.05,
      "cost_per_1m_out": 0.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/hermes-4-405b",
      "name": "Nous: Hermes 4 405B",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/codex-mini",
      "name": "OpenAI: Codex Mini",
      "cost_per_1m_in": 1.5,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.375,
      "context_window": 200000,
      "default_max_tokens": 50000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4-turbo",
      "name": "OpenAI: GPT-4 Turbo",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4-1106-preview",
      "name": "OpenAI: GPT-4 Turbo (older v1106)",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4-turbo-preview",
      "name": "OpenAI: GPT-4 Turbo Preview",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1",
      "name": "OpenAI: GPT-4.1",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1-mini",
      "name": "OpenAI: GPT-4.1 Mini",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 1.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.1,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1-nano",
      "name": "OpenAI: GPT-4.1 Nano",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 1047576,
      "default_max_tokens": 16384,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o",
      "name": "OpenAI: GPT-4o",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-2024-05-13",
      "name": "OpenAI: GPT-4o (2024-05-13)",
      "cost_per_1m_in": 5,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-2024-08-06",
      "name": "OpenAI: GPT-4o (2024-08-06)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 1.25,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-2024-11-20",
      "name": "OpenAI: GPT-4o (2024-11-20)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 1.25,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o:extended",
      "name": "OpenAI: GPT-4o (extended)",
      "cost_per_1m_in": 6,
      "cost_per_1m_out": 18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-audio-preview",
      "name": "OpenAI: GPT-4o Audio",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-mini",
      "name": "OpenAI: GPT-4o-mini",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-mini-2024-07-18",
      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-5",
      "name": "OpenAI: GPT-5",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-5-codex",
      "name": "OpenAI: GPT-5 Codex",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-5-image",
      "name": "OpenAI: GPT-5 Image",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 1.25,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-5-image-mini",
1856      "name": "OpenAI: GPT-5 Image Mini",
1857      "cost_per_1m_in": 2.5,
1858      "cost_per_1m_out": 2,
1859      "cost_per_1m_in_cached": 0,
1860      "cost_per_1m_out_cached": 0.25,
1861      "context_window": 400000,
1862      "default_max_tokens": 64000,
1863      "can_reason": true,
1864      "reasoning_levels": [
1865        "low",
1866        "medium",
1867        "high"
1868      ],
1869      "default_reasoning_effort": "medium",
1870      "supports_attachments": true,
1871      "options": {}
1872    },
1873    {
1874      "id": "openai/gpt-5-mini",
1875      "name": "OpenAI: GPT-5 Mini",
1876      "cost_per_1m_in": 0.25,
1877      "cost_per_1m_out": 2,
1878      "cost_per_1m_in_cached": 0,
1879      "cost_per_1m_out_cached": 0.03,
1880      "context_window": 400000,
1881      "default_max_tokens": 40000,
1882      "can_reason": true,
1883      "reasoning_levels": [
1884        "low",
1885        "medium",
1886        "high"
1887      ],
1888      "default_reasoning_effort": "medium",
1889      "supports_attachments": true,
1890      "options": {}
1891    },
1892    {
1893      "id": "openai/gpt-5-nano",
1894      "name": "OpenAI: GPT-5 Nano",
1895      "cost_per_1m_in": 0.05,
1896      "cost_per_1m_out": 0.4,
1897      "cost_per_1m_in_cached": 0,
1898      "cost_per_1m_out_cached": 0.01,
1899      "context_window": 400000,
1900      "default_max_tokens": 40000,
1901      "can_reason": true,
1902      "reasoning_levels": [
1903        "low",
1904        "medium",
1905        "high"
1906      ],
1907      "default_reasoning_effort": "medium",
1908      "supports_attachments": true,
1909      "options": {}
1910    },
1911    {
1912      "id": "openai/gpt-5-pro",
1913      "name": "OpenAI: GPT-5 Pro",
1914      "cost_per_1m_in": 15,
1915      "cost_per_1m_out": 120,
1916      "cost_per_1m_in_cached": 0,
1917      "cost_per_1m_out_cached": 0,
1918      "context_window": 400000,
1919      "default_max_tokens": 64000,
1920      "can_reason": true,
1921      "reasoning_levels": [
1922        "low",
1923        "medium",
1924        "high"
1925      ],
1926      "default_reasoning_effort": "medium",
1927      "supports_attachments": true,
1928      "options": {}
1929    },
1930    {
1931      "id": "openai/gpt-5.1",
1932      "name": "OpenAI: GPT-5.1",
1933      "cost_per_1m_in": 1.25,
1934      "cost_per_1m_out": 10,
1935      "cost_per_1m_in_cached": 0,
1936      "cost_per_1m_out_cached": 0.125,
1937      "context_window": 400000,
1938      "default_max_tokens": 64000,
1939      "can_reason": true,
1940      "reasoning_levels": [
1941        "low",
1942        "medium",
1943        "high"
1944      ],
1945      "default_reasoning_effort": "medium",
1946      "supports_attachments": true,
1947      "options": {}
1948    },
1949    {
1950      "id": "openai/gpt-5.1-chat",
1951      "name": "OpenAI: GPT-5.1 Chat",
1952      "cost_per_1m_in": 1.25,
1953      "cost_per_1m_out": 10,
1954      "cost_per_1m_in_cached": 0,
1955      "cost_per_1m_out_cached": 0.125,
1956      "context_window": 128000,
1957      "default_max_tokens": 8192,
1958      "can_reason": false,
1959      "supports_attachments": true,
1960      "options": {}
1961    },
1962    {
1963      "id": "openai/gpt-5.1-codex",
1964      "name": "OpenAI: GPT-5.1-Codex",
1965      "cost_per_1m_in": 1.25,
1966      "cost_per_1m_out": 10,
1967      "cost_per_1m_in_cached": 0,
1968      "cost_per_1m_out_cached": 0.125,
1969      "context_window": 400000,
1970      "default_max_tokens": 64000,
1971      "can_reason": true,
1972      "reasoning_levels": [
1973        "low",
1974        "medium",
1975        "high"
1976      ],
1977      "default_reasoning_effort": "medium",
1978      "supports_attachments": true,
1979      "options": {}
1980    },
1981    {
1982      "id": "openai/gpt-5.1-codex-mini",
1983      "name": "OpenAI: GPT-5.1-Codex-Mini",
1984      "cost_per_1m_in": 0.25,
1985      "cost_per_1m_out": 2,
1986      "cost_per_1m_in_cached": 0,
1987      "cost_per_1m_out_cached": 0.025,
1988      "context_window": 400000,
1989      "default_max_tokens": 50000,
1990      "can_reason": true,
1991      "reasoning_levels": [
1992        "low",
1993        "medium",
1994        "high"
1995      ],
1996      "default_reasoning_effort": "medium",
1997      "supports_attachments": true,
1998      "options": {}
1999    },
2000    {
2001      "id": "openai/gpt-oss-120b",
2002      "name": "OpenAI: gpt-oss-120b",
2003      "cost_per_1m_in": 0.04,
2004      "cost_per_1m_out": 0.4,
2005      "cost_per_1m_in_cached": 0,
2006      "cost_per_1m_out_cached": 0,
2007      "context_window": 131072,
2008      "default_max_tokens": 65536,
2009      "can_reason": true,
2010      "reasoning_levels": [
2011        "low",
2012        "medium",
2013        "high"
2014      ],
2015      "default_reasoning_effort": "medium",
2016      "supports_attachments": false,
2017      "options": {}
2018    },
2019    {
2020      "id": "openai/gpt-oss-120b:exacto",
2021      "name": "OpenAI: gpt-oss-120b (exacto)",
2022      "cost_per_1m_in": 0.04,
2023      "cost_per_1m_out": 0.2,
2024      "cost_per_1m_in_cached": 0,
2025      "cost_per_1m_out_cached": 0,
2026      "context_window": 131072,
2027      "default_max_tokens": 16384,
2028      "can_reason": true,
2029      "reasoning_levels": [
2030        "low",
2031        "medium",
2032        "high"
2033      ],
2034      "default_reasoning_effort": "medium",
2035      "supports_attachments": false,
2036      "options": {}
2037    },
2038    {
2039      "id": "openai/gpt-oss-20b",
2040      "name": "OpenAI: gpt-oss-20b",
2041      "cost_per_1m_in": 0.03,
2042      "cost_per_1m_out": 0.14,
2043      "cost_per_1m_in_cached": 0,
2044      "cost_per_1m_out_cached": 0,
2045      "context_window": 131072,
2046      "default_max_tokens": 13107,
2047      "can_reason": true,
2048      "reasoning_levels": [
2049        "low",
2050        "medium",
2051        "high"
2052      ],
2053      "default_reasoning_effort": "medium",
2054      "supports_attachments": false,
2055      "options": {}
2056    },
2057    {
2058      "id": "openai/gpt-oss-20b:free",
2059      "name": "OpenAI: gpt-oss-20b (free)",
2060      "cost_per_1m_in": 0,
2061      "cost_per_1m_out": 0,
2062      "cost_per_1m_in_cached": 0,
2063      "cost_per_1m_out_cached": 0,
2064      "context_window": 131072,
2065      "default_max_tokens": 65536,
2066      "can_reason": true,
2067      "reasoning_levels": [
2068        "low",
2069        "medium",
2070        "high"
2071      ],
2072      "default_reasoning_effort": "medium",
2073      "supports_attachments": false,
2074      "options": {}
2075    },
2076    {
2077      "id": "openai/gpt-oss-safeguard-20b",
2078      "name": "OpenAI: gpt-oss-safeguard-20b",
2079      "cost_per_1m_in": 0.075,
2080      "cost_per_1m_out": 0.3,
2081      "cost_per_1m_in_cached": 0,
2082      "cost_per_1m_out_cached": 0.037,
2083      "context_window": 131072,
2084      "default_max_tokens": 32768,
2085      "can_reason": true,
2086      "reasoning_levels": [
2087        "low",
2088        "medium",
2089        "high"
2090      ],
2091      "default_reasoning_effort": "medium",
2092      "supports_attachments": false,
2093      "options": {}
2094    },
2095    {
2096      "id": "openai/o1",
2097      "name": "OpenAI: o1",
2098      "cost_per_1m_in": 15,
2099      "cost_per_1m_out": 60,
2100      "cost_per_1m_in_cached": 0,
2101      "cost_per_1m_out_cached": 7.5,
2102      "context_window": 200000,
2103      "default_max_tokens": 50000,
2104      "can_reason": false,
2105      "supports_attachments": true,
2106      "options": {}
2107    },
2108    {
2109      "id": "openai/o3",
2110      "name": "OpenAI: o3",
2111      "cost_per_1m_in": 2,
2112      "cost_per_1m_out": 8,
2113      "cost_per_1m_in_cached": 0,
2114      "cost_per_1m_out_cached": 0.5,
2115      "context_window": 200000,
2116      "default_max_tokens": 50000,
2117      "can_reason": true,
2118      "reasoning_levels": [
2119        "low",
2120        "medium",
2121        "high"
2122      ],
2123      "default_reasoning_effort": "medium",
2124      "supports_attachments": true,
2125      "options": {}
2126    },
2127    {
2128      "id": "openai/o3-deep-research",
2129      "name": "OpenAI: o3 Deep Research",
2130      "cost_per_1m_in": 10,
2131      "cost_per_1m_out": 40,
2132      "cost_per_1m_in_cached": 0,
2133      "cost_per_1m_out_cached": 2.5,
2134      "context_window": 200000,
2135      "default_max_tokens": 50000,
2136      "can_reason": true,
2137      "reasoning_levels": [
2138        "low",
2139        "medium",
2140        "high"
2141      ],
2142      "default_reasoning_effort": "medium",
2143      "supports_attachments": true,
2144      "options": {}
2145    },
2146    {
2147      "id": "openai/o3-mini",
2148      "name": "OpenAI: o3 Mini",
2149      "cost_per_1m_in": 1.1,
2150      "cost_per_1m_out": 4.4,
2151      "cost_per_1m_in_cached": 0,
2152      "cost_per_1m_out_cached": 0.55,
2153      "context_window": 200000,
2154      "default_max_tokens": 50000,
2155      "can_reason": false,
2156      "supports_attachments": false,
2157      "options": {}
2158    },
2159    {
2160      "id": "openai/o3-mini-high",
2161      "name": "OpenAI: o3 Mini High",
2162      "cost_per_1m_in": 1.1,
2163      "cost_per_1m_out": 4.4,
2164      "cost_per_1m_in_cached": 0,
2165      "cost_per_1m_out_cached": 0.55,
2166      "context_window": 200000,
2167      "default_max_tokens": 50000,
2168      "can_reason": false,
2169      "supports_attachments": false,
2170      "options": {}
2171    },
2172    {
2173      "id": "openai/o3-pro",
2174      "name": "OpenAI: o3 Pro",
2175      "cost_per_1m_in": 20,
2176      "cost_per_1m_out": 80,
2177      "cost_per_1m_in_cached": 0,
2178      "cost_per_1m_out_cached": 0,
2179      "context_window": 200000,
2180      "default_max_tokens": 50000,
2181      "can_reason": true,
2182      "reasoning_levels": [
2183        "low",
2184        "medium",
2185        "high"
2186      ],
2187      "default_reasoning_effort": "medium",
2188      "supports_attachments": true,
2189      "options": {}
2190    },
2191    {
2192      "id": "openai/o4-mini",
2193      "name": "OpenAI: o4 Mini",
2194      "cost_per_1m_in": 1.1,
2195      "cost_per_1m_out": 4.4,
2196      "cost_per_1m_in_cached": 0,
2197      "cost_per_1m_out_cached": 0.275,
2198      "context_window": 200000,
2199      "default_max_tokens": 50000,
2200      "can_reason": true,
2201      "reasoning_levels": [
2202        "low",
2203        "medium",
2204        "high"
2205      ],
2206      "default_reasoning_effort": "medium",
2207      "supports_attachments": true,
2208      "options": {}
2209    },
2210    {
2211      "id": "openai/o4-mini-deep-research",
2212      "name": "OpenAI: o4 Mini Deep Research",
2213      "cost_per_1m_in": 2,
2214      "cost_per_1m_out": 8,
2215      "cost_per_1m_in_cached": 0,
2216      "cost_per_1m_out_cached": 0.5,
2217      "context_window": 200000,
2218      "default_max_tokens": 50000,
2219      "can_reason": true,
2220      "reasoning_levels": [
2221        "low",
2222        "medium",
2223        "high"
2224      ],
2225      "default_reasoning_effort": "medium",
2226      "supports_attachments": true,
2227      "options": {}
2228    },
2229    {
2230      "id": "openai/o4-mini-high",
2231      "name": "OpenAI: o4 Mini High",
2232      "cost_per_1m_in": 1.1,
2233      "cost_per_1m_out": 4.4,
2234      "cost_per_1m_in_cached": 0,
2235      "cost_per_1m_out_cached": 0.275,
2236      "context_window": 200000,
2237      "default_max_tokens": 50000,
2238      "can_reason": true,
2239      "reasoning_levels": [
2240        "low",
2241        "medium",
2242        "high"
2243      ],
2244      "default_reasoning_effort": "medium",
2245      "supports_attachments": true,
2246      "options": {}
2247    },
2248    {
2249      "id": "prime-intellect/intellect-3",
2250      "name": "Prime Intellect: INTELLECT-3",
2251      "cost_per_1m_in": 0.2,
2252      "cost_per_1m_out": 1.1,
2253      "cost_per_1m_in_cached": 0,
2254      "cost_per_1m_out_cached": 0,
2255      "context_window": 131072,
2256      "default_max_tokens": 65536,
2257      "can_reason": true,
2258      "reasoning_levels": [
2259        "low",
2260        "medium",
2261        "high"
2262      ],
2263      "default_reasoning_effort": "medium",
2264      "supports_attachments": false,
2265      "options": {}
2266    },
2267    {
2268      "id": "qwen/qwen-2.5-72b-instruct",
2269      "name": "Qwen2.5 72B Instruct",
2270      "cost_per_1m_in": 0.07,
2271      "cost_per_1m_out": 0.26,
2272      "cost_per_1m_in_cached": 0,
2273      "cost_per_1m_out_cached": 0,
2274      "context_window": 32768,
2275      "default_max_tokens": 16384,
2276      "can_reason": false,
2277      "supports_attachments": false,
2278      "options": {}
2279    },
2280    {
2281      "id": "qwen/qwq-32b",
2282      "name": "Qwen: QwQ 32B",
2283      "cost_per_1m_in": 0.15,
2284      "cost_per_1m_out": 0.58,
2285      "cost_per_1m_in_cached": 0,
2286      "cost_per_1m_out_cached": 0,
2287      "context_window": 131072,
2288      "default_max_tokens": 65536,
2289      "can_reason": true,
2290      "reasoning_levels": [
2291        "low",
2292        "medium",
2293        "high"
2294      ],
2295      "default_reasoning_effort": "medium",
2296      "supports_attachments": false,
2297      "options": {}
2298    },
2299    {
2300      "id": "qwen/qwen-plus-2025-07-28",
2301      "name": "Qwen: Qwen Plus 0728",
2302      "cost_per_1m_in": 0.4,
2303      "cost_per_1m_out": 1.2,
2304      "cost_per_1m_in_cached": 0,
2305      "cost_per_1m_out_cached": 0,
2306      "context_window": 1000000,
2307      "default_max_tokens": 16384,
2308      "can_reason": false,
2309      "supports_attachments": false,
2310      "options": {}
2311    },
2312    {
2313      "id": "qwen/qwen-plus-2025-07-28:thinking",
2314      "name": "Qwen: Qwen Plus 0728 (thinking)",
2315      "cost_per_1m_in": 0.4,
2316      "cost_per_1m_out": 4,
2317      "cost_per_1m_in_cached": 0,
2318      "cost_per_1m_out_cached": 0,
2319      "context_window": 1000000,
2320      "default_max_tokens": 16384,
2321      "can_reason": true,
2322      "reasoning_levels": [
2323        "low",
2324        "medium",
2325        "high"
2326      ],
2327      "default_reasoning_effort": "medium",
2328      "supports_attachments": false,
2329      "options": {}
2330    },
2331    {
2332      "id": "qwen/qwen-vl-max",
2333      "name": "Qwen: Qwen VL Max",
2334      "cost_per_1m_in": 0.8,
2335      "cost_per_1m_out": 3.2,
2336      "cost_per_1m_in_cached": 0,
2337      "cost_per_1m_out_cached": 0,
2338      "context_window": 131072,
2339      "default_max_tokens": 4096,
2340      "can_reason": false,
2341      "supports_attachments": true,
2342      "options": {}
2343    },
2344    {
2345      "id": "qwen/qwen-max",
2346      "name": "Qwen: Qwen-Max",
2347      "cost_per_1m_in": 1.6,
2348      "cost_per_1m_out": 6.4,
2349      "cost_per_1m_in_cached": 0,
2350      "cost_per_1m_out_cached": 0.64,
2351      "context_window": 32768,
2352      "default_max_tokens": 4096,
2353      "can_reason": false,
2354      "supports_attachments": false,
2355      "options": {}
2356    },
2357    {
2358      "id": "qwen/qwen-plus",
2359      "name": "Qwen: Qwen-Plus",
2360      "cost_per_1m_in": 0.4,
2361      "cost_per_1m_out": 1.2,
2362      "cost_per_1m_in_cached": 0,
2363      "cost_per_1m_out_cached": 0.16,
2364      "context_window": 131072,
2365      "default_max_tokens": 4096,
2366      "can_reason": false,
2367      "supports_attachments": false,
2368      "options": {}
2369    },
2370    {
2371      "id": "qwen/qwen-turbo",
2372      "name": "Qwen: Qwen-Turbo",
2373      "cost_per_1m_in": 0.05,
2374      "cost_per_1m_out": 0.2,
2375      "cost_per_1m_in_cached": 0,
2376      "cost_per_1m_out_cached": 0.02,
2377      "context_window": 1000000,
2378      "default_max_tokens": 4096,
2379      "can_reason": false,
2380      "supports_attachments": false,
2381      "options": {}
2382    },
2383    {
2384      "id": "qwen/qwen3-14b",
2385      "name": "Qwen: Qwen3 14B",
2386      "cost_per_1m_in": 0.05,
2387      "cost_per_1m_out": 0.22,
2388      "cost_per_1m_in_cached": 0,
2389      "cost_per_1m_out_cached": 0,
2390      "context_window": 40960,
2391      "default_max_tokens": 20480,
2392      "can_reason": true,
2393      "reasoning_levels": [
2394        "low",
2395        "medium",
2396        "high"
2397      ],
2398      "default_reasoning_effort": "medium",
2399      "supports_attachments": false,
2400      "options": {}
2401    },
2402    {
2403      "id": "qwen/qwen3-235b-a22b",
2404      "name": "Qwen: Qwen3 235B A22B",
2405      "cost_per_1m_in": 0.22,
2406      "cost_per_1m_out": 0.88,
2407      "cost_per_1m_in_cached": 0,
2408      "cost_per_1m_out_cached": 0,
2409      "context_window": 131072,
2410      "default_max_tokens": 13107,
2411      "can_reason": true,
2412      "reasoning_levels": [
2413        "low",
2414        "medium",
2415        "high"
2416      ],
2417      "default_reasoning_effort": "medium",
2418      "supports_attachments": false,
2419      "options": {}
2420    },
2421    {
2422      "id": "qwen/qwen3-235b-a22b:free",
2423      "name": "Qwen: Qwen3 235B A22B (free)",
2424      "cost_per_1m_in": 0,
2425      "cost_per_1m_out": 0,
2426      "cost_per_1m_in_cached": 0,
2427      "cost_per_1m_out_cached": 0,
2428      "context_window": 131072,
2429      "default_max_tokens": 13107,
2430      "can_reason": true,
2431      "reasoning_levels": [
2432        "low",
2433        "medium",
2434        "high"
2435      ],
2436      "default_reasoning_effort": "medium",
2437      "supports_attachments": false,
2438      "options": {}
2439    },
2440    {
2441      "id": "qwen/qwen3-235b-a22b-2507",
2442      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2443      "cost_per_1m_in": 0.25,
2444      "cost_per_1m_out": 1,
2445      "cost_per_1m_in_cached": 0,
2446      "cost_per_1m_out_cached": 0,
2447      "context_window": 262144,
2448      "default_max_tokens": 8192,
2449      "can_reason": false,
2450      "supports_attachments": false,
2451      "options": {}
2452    },
2453    {
2454      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2455      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2456      "cost_per_1m_in": 0.11,
2457      "cost_per_1m_out": 0.6,
2458      "cost_per_1m_in_cached": 0,
2459      "cost_per_1m_out_cached": 0,
2460      "context_window": 262144,
2461      "default_max_tokens": 131072,
2462      "can_reason": true,
2463      "reasoning_levels": [
2464        "low",
2465        "medium",
2466        "high"
2467      ],
2468      "default_reasoning_effort": "medium",
2469      "supports_attachments": false,
2470      "options": {}
2471    },
2472    {
2473      "id": "qwen/qwen3-30b-a3b",
2474      "name": "Qwen: Qwen3 30B A3B",
2475      "cost_per_1m_in": 0.08,
2476      "cost_per_1m_out": 0.28,
2477      "cost_per_1m_in_cached": 0,
2478      "cost_per_1m_out_cached": 0,
2479      "context_window": 131072,
2480      "default_max_tokens": 65536,
2481      "can_reason": true,
2482      "reasoning_levels": [
2483        "low",
2484        "medium",
2485        "high"
2486      ],
2487      "default_reasoning_effort": "medium",
2488      "supports_attachments": false,
2489      "options": {}
2490    },
2491    {
2492      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2493      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2494      "cost_per_1m_in": 0.08,
2495      "cost_per_1m_out": 0.33,
2496      "cost_per_1m_in_cached": 0,
2497      "cost_per_1m_out_cached": 0,
2498      "context_window": 262144,
2499      "default_max_tokens": 131072,
2500      "can_reason": false,
2501      "supports_attachments": false,
2502      "options": {}
2503    },
2504    {
2505      "id": "qwen/qwen3-32b",
2506      "name": "Qwen: Qwen3 32B",
2507      "cost_per_1m_in": 0.15,
2508      "cost_per_1m_out": 0.5,
2509      "cost_per_1m_in_cached": 0,
2510      "cost_per_1m_out_cached": 0,
2511      "context_window": 131072,
2512      "default_max_tokens": 4000,
2513      "can_reason": true,
2514      "reasoning_levels": [
2515        "low",
2516        "medium",
2517        "high"
2518      ],
2519      "default_reasoning_effort": "medium",
2520      "supports_attachments": false,
2521      "options": {}
2522    },
2523    {
2524      "id": "qwen/qwen3-4b:free",
2525      "name": "Qwen: Qwen3 4B (free)",
2526      "cost_per_1m_in": 0,
2527      "cost_per_1m_out": 0,
2528      "cost_per_1m_in_cached": 0,
2529      "cost_per_1m_out_cached": 0,
2530      "context_window": 40960,
2531      "default_max_tokens": 4096,
2532      "can_reason": true,
2533      "reasoning_levels": [
2534        "low",
2535        "medium",
2536        "high"
2537      ],
2538      "default_reasoning_effort": "medium",
2539      "supports_attachments": false,
2540      "options": {}
2541    },
2542    {
2543      "id": "qwen/qwen3-8b",
2544      "name": "Qwen: Qwen3 8B",
2545      "cost_per_1m_in": 0.2,
2546      "cost_per_1m_out": 0.2,
2547      "cost_per_1m_in_cached": 0,
2548      "cost_per_1m_out_cached": 0,
2549      "context_window": 40960,
2550      "default_max_tokens": 4096,
2551      "can_reason": true,
2552      "reasoning_levels": [
2553        "low",
2554        "medium",
2555        "high"
2556      ],
2557      "default_reasoning_effort": "medium",
2558      "supports_attachments": false,
2559      "options": {}
2560    },
2561    {
2562      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2563      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2564      "cost_per_1m_in": 0.06,
2565      "cost_per_1m_out": 0.25,
2566      "cost_per_1m_in_cached": 0,
2567      "cost_per_1m_out_cached": 0,
2568      "context_window": 262144,
2569      "default_max_tokens": 131072,
2570      "can_reason": false,
2571      "supports_attachments": false,
2572      "options": {}
2573    },
2574    {
2575      "id": "qwen/qwen3-coder",
2576      "name": "Qwen: Qwen3 Coder 480B A35B",
2577      "cost_per_1m_in": 0.25,
2578      "cost_per_1m_out": 1,
2579      "cost_per_1m_in_cached": 0,
2580      "cost_per_1m_out_cached": 0,
2581      "context_window": 262144,
2582      "default_max_tokens": 131072,
2583      "can_reason": false,
2584      "supports_attachments": false,
2585      "options": {}
2586    },
2587    {
2588      "id": "qwen/qwen3-coder:exacto",
2589      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2590      "cost_per_1m_in": 0.38,
2591      "cost_per_1m_out": 1.53,
2592      "cost_per_1m_in_cached": 0,
2593      "cost_per_1m_out_cached": 0,
2594      "context_window": 262144,
2595      "default_max_tokens": 131072,
2596      "can_reason": true,
2597      "reasoning_levels": [
2598        "low",
2599        "medium",
2600        "high"
2601      ],
2602      "default_reasoning_effort": "medium",
2603      "supports_attachments": false,
2604      "options": {}
2605    },
2606    {
2607      "id": "qwen/qwen3-coder:free",
2608      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2609      "cost_per_1m_in": 0,
2610      "cost_per_1m_out": 0,
2611      "cost_per_1m_in_cached": 0,
2612      "cost_per_1m_out_cached": 0,
2613      "context_window": 262000,
2614      "default_max_tokens": 131000,
2615      "can_reason": false,
2616      "supports_attachments": false,
2617      "options": {}
2618    },
2619    {
2620      "id": "qwen/qwen3-coder-flash",
2621      "name": "Qwen: Qwen3 Coder Flash",
2622      "cost_per_1m_in": 0.3,
2623      "cost_per_1m_out": 1.5,
2624      "cost_per_1m_in_cached": 0,
2625      "cost_per_1m_out_cached": 0.08,
2626      "context_window": 128000,
2627      "default_max_tokens": 32768,
2628      "can_reason": false,
2629      "supports_attachments": false,
2630      "options": {}
2631    },
2632    {
2633      "id": "qwen/qwen3-coder-plus",
2634      "name": "Qwen: Qwen3 Coder Plus",
2635      "cost_per_1m_in": 1,
2636      "cost_per_1m_out": 5,
2637      "cost_per_1m_in_cached": 0,
2638      "cost_per_1m_out_cached": 0.1,
2639      "context_window": 128000,
2640      "default_max_tokens": 32768,
2641      "can_reason": false,
2642      "supports_attachments": false,
2643      "options": {}
2644    },
2645    {
2646      "id": "qwen/qwen3-max",
2647      "name": "Qwen: Qwen3 Max",
2648      "cost_per_1m_in": 1.2,
2649      "cost_per_1m_out": 6,
2650      "cost_per_1m_in_cached": 0,
2651      "cost_per_1m_out_cached": 0.24,
2652      "context_window": 256000,
2653      "default_max_tokens": 16384,
2654      "can_reason": false,
2655      "supports_attachments": false,
2656      "options": {}
2657    },
2658    {
2659      "id": "qwen/qwen3-next-80b-a3b-instruct",
2660      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2661      "cost_per_1m_in": 0.1,
2662      "cost_per_1m_out": 0.8,
2663      "cost_per_1m_in_cached": 0,
2664      "cost_per_1m_out_cached": 0,
2665      "context_window": 262144,
2666      "default_max_tokens": 131072,
2667      "can_reason": false,
2668      "supports_attachments": false,
2669      "options": {}
2670    },
2671    {
2672      "id": "qwen/qwen3-next-80b-a3b-thinking",
2673      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2674      "cost_per_1m_in": 0.3,
2675      "cost_per_1m_out": 0.3,
2676      "cost_per_1m_in_cached": 0,
2677      "cost_per_1m_out_cached": 0,
2678      "context_window": 262144,
2679      "default_max_tokens": 131072,
2680      "can_reason": true,
2681      "reasoning_levels": [
2682        "low",
2683        "medium",
2684        "high"
2685      ],
2686      "default_reasoning_effort": "medium",
2687      "supports_attachments": false,
2688      "options": {}
2689    },
2690    {
2691      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2692      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2693      "cost_per_1m_in": 0.22,
2694      "cost_per_1m_out": 0.88,
2695      "cost_per_1m_in_cached": 0,
2696      "cost_per_1m_out_cached": 0,
2697      "context_window": 262144,
2698      "default_max_tokens": 26214,
2699      "can_reason": false,
2700      "supports_attachments": true,
2701      "options": {}
2702    },
2703    {
2704      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2705      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2706      "cost_per_1m_in": 0.3,
2707      "cost_per_1m_out": 1.2,
2708      "cost_per_1m_in_cached": 0,
2709      "cost_per_1m_out_cached": 0,
2710      "context_window": 262144,
2711      "default_max_tokens": 131072,
2712      "can_reason": true,
2713      "reasoning_levels": [
2714        "low",
2715        "medium",
2716        "high"
2717      ],
2718      "default_reasoning_effort": "medium",
2719      "supports_attachments": true,
2720      "options": {}
2721    },
2722    {
2723      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2724      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2725      "cost_per_1m_in": 0.29,
2726      "cost_per_1m_out": 1,
2727      "cost_per_1m_in_cached": 0,
2728      "cost_per_1m_out_cached": 0,
2729      "context_window": 262144,
2730      "default_max_tokens": 131072,
2731      "can_reason": false,
2732      "supports_attachments": true,
2733      "options": {}
2734    },
2735    {
2736      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2737      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2738      "cost_per_1m_in": 0.16,
2739      "cost_per_1m_out": 0.8,
2740      "cost_per_1m_in_cached": 0,
2741      "cost_per_1m_out_cached": 0,
2742      "context_window": 131072,
2743      "default_max_tokens": 16384,
2744      "can_reason": true,
2745      "reasoning_levels": [
2746        "low",
2747        "medium",
2748        "high"
2749      ],
2750      "default_reasoning_effort": "medium",
2751      "supports_attachments": true,
2752      "options": {}
2753    },
2754    {
2755      "id": "qwen/qwen3-vl-8b-instruct",
2756      "name": "Qwen: Qwen3 VL 8B Instruct",
2757      "cost_per_1m_in": 0.18,
2758      "cost_per_1m_out": 0.7,
2759      "cost_per_1m_in_cached": 0,
2760      "cost_per_1m_out_cached": 0,
2761      "context_window": 256000,
2762      "default_max_tokens": 16384,
2763      "can_reason": false,
2764      "supports_attachments": true,
2765      "options": {}
2766    },
2767    {
2768      "id": "qwen/qwen3-vl-8b-thinking",
2769      "name": "Qwen: Qwen3 VL 8B Thinking",
2770      "cost_per_1m_in": 0.18,
2771      "cost_per_1m_out": 2.1,
2772      "cost_per_1m_in_cached": 0,
2773      "cost_per_1m_out_cached": 0,
2774      "context_window": 256000,
2775      "default_max_tokens": 16384,
2776      "can_reason": true,
2777      "reasoning_levels": [
2778        "low",
2779        "medium",
2780        "high"
2781      ],
2782      "default_reasoning_effort": "medium",
2783      "supports_attachments": true,
2784      "options": {}
2785    },
2786    {
2787      "id": "stepfun-ai/step3",
2788      "name": "StepFun: Step3",
2789      "cost_per_1m_in": 0.57,
2790      "cost_per_1m_out": 1.42,
2791      "cost_per_1m_in_cached": 0,
2792      "cost_per_1m_out_cached": 0,
2793      "context_window": 65536,
2794      "default_max_tokens": 32768,
2795      "can_reason": true,
2796      "reasoning_levels": [
2797        "low",
2798        "medium",
2799        "high"
2800      ],
2801      "default_reasoning_effort": "medium",
2802      "supports_attachments": true,
2803      "options": {}
2804    },
2805    {
2806      "id": "tngtech/deepseek-r1t2-chimera",
2807      "name": "TNG: DeepSeek R1T2 Chimera",
2808      "cost_per_1m_in": 0.3,
2809      "cost_per_1m_out": 1.2,
2810      "cost_per_1m_in_cached": 0,
2811      "cost_per_1m_out_cached": 0,
2812      "context_window": 163840,
2813      "default_max_tokens": 81920,
2814      "can_reason": true,
2815      "reasoning_levels": [
2816        "low",
2817        "medium",
2818        "high"
2819      ],
2820      "default_reasoning_effort": "medium",
2821      "supports_attachments": false,
2822      "options": {}
2823    },
2824    {
2825      "id": "tngtech/tng-r1t-chimera",
2826      "name": "TNG: R1T Chimera",
2827      "cost_per_1m_in": 0.3,
2828      "cost_per_1m_out": 1.2,
2829      "cost_per_1m_in_cached": 0,
2830      "cost_per_1m_out_cached": 0,
2831      "context_window": 163840,
2832      "default_max_tokens": 81920,
2833      "can_reason": true,
2834      "reasoning_levels": [
2835        "low",
2836        "medium",
2837        "high"
2838      ],
2839      "default_reasoning_effort": "medium",
2840      "supports_attachments": false,
2841      "options": {}
2842    },
2843    {
2844      "id": "tngtech/tng-r1t-chimera:free",
2845      "name": "TNG: R1T Chimera (free)",
2846      "cost_per_1m_in": 0,
2847      "cost_per_1m_out": 0,
2848      "cost_per_1m_in_cached": 0,
2849      "cost_per_1m_out_cached": 0,
2850      "context_window": 163840,
2851      "default_max_tokens": 81920,
2852      "can_reason": true,
2853      "reasoning_levels": [
2854        "low",
2855        "medium",
2856        "high"
2857      ],
2858      "default_reasoning_effort": "medium",
2859      "supports_attachments": false,
2860      "options": {}
2861    },
2862    {
2863      "id": "thedrummer/rocinante-12b",
2864      "name": "TheDrummer: Rocinante 12B",
2865      "cost_per_1m_in": 0.17,
2866      "cost_per_1m_out": 0.43,
2867      "cost_per_1m_in_cached": 0,
2868      "cost_per_1m_out_cached": 0,
2869      "context_window": 32768,
2870      "default_max_tokens": 3276,
2871      "can_reason": false,
2872      "supports_attachments": false,
2873      "options": {}
2874    },
2875    {
2876      "id": "thedrummer/unslopnemo-12b",
2877      "name": "TheDrummer: UnslopNemo 12B",
2878      "cost_per_1m_in": 0.4,
2879      "cost_per_1m_out": 0.4,
2880      "cost_per_1m_in_cached": 0,
2881      "cost_per_1m_out_cached": 0,
2882      "context_window": 32768,
2883      "default_max_tokens": 3276,
2884      "can_reason": false,
2885      "supports_attachments": false,
2886      "options": {}
2887    },
2888    {
2889      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2890      "name": "Tongyi DeepResearch 30B A3B",
2891      "cost_per_1m_in": 0.09,
2892      "cost_per_1m_out": 0.4,
2893      "cost_per_1m_in_cached": 0,
2894      "cost_per_1m_out_cached": 0,
2895      "context_window": 131072,
2896      "default_max_tokens": 65536,
2897      "can_reason": true,
2898      "reasoning_levels": [
2899        "low",
2900        "medium",
2901        "high"
2902      ],
2903      "default_reasoning_effort": "medium",
2904      "supports_attachments": false,
2905      "options": {}
2906    },
2907    {
2908      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2909      "name": "Tongyi DeepResearch 30B A3B (free)",
2910      "cost_per_1m_in": 0,
2911      "cost_per_1m_out": 0,
2912      "cost_per_1m_in_cached": 0,
2913      "cost_per_1m_out_cached": 0,
2914      "context_window": 131072,
2915      "default_max_tokens": 65536,
2916      "can_reason": true,
2917      "reasoning_levels": [
2918        "low",
2919        "medium",
2920        "high"
2921      ],
2922      "default_reasoning_effort": "medium",
2923      "supports_attachments": false,
2924      "options": {}
2925    },
2926    {
2927      "id": "z-ai/glm-4-32b",
2928      "name": "Z.AI: GLM 4 32B",
2929      "cost_per_1m_in": 0.1,
2930      "cost_per_1m_out": 0.1,
2931      "cost_per_1m_in_cached": 0,
2932      "cost_per_1m_out_cached": 0,
2933      "context_window": 128000,
2934      "default_max_tokens": 12800,
2935      "can_reason": false,
2936      "supports_attachments": false,
2937      "options": {}
2938    },
2939    {
2940      "id": "z-ai/glm-4.5",
2941      "name": "Z.AI: GLM 4.5",
2942      "cost_per_1m_in": 0.35,
2943      "cost_per_1m_out": 1.5,
2944      "cost_per_1m_in_cached": 0,
2945      "cost_per_1m_out_cached": 0,
2946      "context_window": 131072,
2947      "default_max_tokens": 65536,
2948      "can_reason": true,
2949      "reasoning_levels": [
2950        "low",
2951        "medium",
2952        "high"
2953      ],
2954      "default_reasoning_effort": "medium",
2955      "supports_attachments": false,
2956      "options": {}
2957    },
2958    {
2959      "id": "z-ai/glm-4.5-air",
2960      "name": "Z.AI: GLM 4.5 Air",
2961      "cost_per_1m_in": 0.104,
2962      "cost_per_1m_out": 0.68,
2963      "cost_per_1m_in_cached": 0,
2964      "cost_per_1m_out_cached": 0,
2965      "context_window": 131072,
2966      "default_max_tokens": 49152,
2967      "can_reason": true,
2968      "reasoning_levels": [
2969        "low",
2970        "medium",
2971        "high"
2972      ],
2973      "default_reasoning_effort": "medium",
2974      "supports_attachments": false,
2975      "options": {}
2976    },
2977    {
2978      "id": "z-ai/glm-4.5-air:free",
2979      "name": "Z.AI: GLM 4.5 Air (free)",
2980      "cost_per_1m_in": 0,
2981      "cost_per_1m_out": 0,
2982      "cost_per_1m_in_cached": 0,
2983      "cost_per_1m_out_cached": 0,
2984      "context_window": 131072,
2985      "default_max_tokens": 48000,
2986      "can_reason": true,
2987      "reasoning_levels": [
2988        "low",
2989        "medium",
2990        "high"
2991      ],
2992      "default_reasoning_effort": "medium",
2993      "supports_attachments": false,
2994      "options": {}
2995    },
2996    {
2997      "id": "z-ai/glm-4.5v",
2998      "name": "Z.AI: GLM 4.5V",
2999      "cost_per_1m_in": 0.6,
3000      "cost_per_1m_out": 1.8,
3001      "cost_per_1m_in_cached": 0,
3002      "cost_per_1m_out_cached": 0.11,
3003      "context_window": 65536,
3004      "default_max_tokens": 8192,
3005      "can_reason": true,
3006      "reasoning_levels": [
3007        "low",
3008        "medium",
3009        "high"
3010      ],
3011      "default_reasoning_effort": "medium",
3012      "supports_attachments": true,
3013      "options": {}
3014    },
3015    {
3016      "id": "z-ai/glm-4.6",
3017      "name": "Z.AI: GLM 4.6",
3018      "cost_per_1m_in": 0.48,
3019      "cost_per_1m_out": 1.76,
3020      "cost_per_1m_in_cached": 0,
3021      "cost_per_1m_out_cached": 0.11,
3022      "context_window": 204800,
3023      "default_max_tokens": 65536,
3024      "can_reason": true,
3025      "reasoning_levels": [
3026        "low",
3027        "medium",
3028        "high"
3029      ],
3030      "default_reasoning_effort": "medium",
3031      "supports_attachments": false,
3032      "options": {}
3033    },
3034    {
3035      "id": "z-ai/glm-4.6:exacto",
3036      "name": "Z.AI: GLM 4.6 (exacto)",
3037      "cost_per_1m_in": 0.48,
3038      "cost_per_1m_out": 1.76,
3039      "cost_per_1m_in_cached": 0,
3040      "cost_per_1m_out_cached": 0,
3041      "context_window": 204800,
3042      "default_max_tokens": 65536,
3043      "can_reason": true,
3044      "reasoning_levels": [
3045        "low",
3046        "medium",
3047        "high"
3048      ],
3049      "default_reasoning_effort": "medium",
3050      "supports_attachments": false,
3051      "options": {}
3052    },
3053    {
3054      "id": "x-ai/grok-3",
3055      "name": "xAI: Grok 3",
3056      "cost_per_1m_in": 5,
3057      "cost_per_1m_out": 25,
3058      "cost_per_1m_in_cached": 0,
3059      "cost_per_1m_out_cached": 1.25,
3060      "context_window": 131072,
3061      "default_max_tokens": 13107,
3062      "can_reason": false,
3063      "supports_attachments": false,
3064      "options": {}
3065    },
3066    {
3067      "id": "x-ai/grok-3-beta",
3068      "name": "xAI: Grok 3 Beta",
3069      "cost_per_1m_in": 5,
3070      "cost_per_1m_out": 25,
3071      "cost_per_1m_in_cached": 0,
3072      "cost_per_1m_out_cached": 1.25,
3073      "context_window": 131072,
3074      "default_max_tokens": 13107,
3075      "can_reason": false,
3076      "supports_attachments": false,
3077      "options": {}
3078    },
3079    {
3080      "id": "x-ai/grok-3-mini",
3081      "name": "xAI: Grok 3 Mini",
3082      "cost_per_1m_in": 0.3,
3083      "cost_per_1m_out": 0.5,
3084      "cost_per_1m_in_cached": 0,
3085      "cost_per_1m_out_cached": 0.075,
3086      "context_window": 131072,
3087      "default_max_tokens": 13107,
3088      "can_reason": true,
3089      "reasoning_levels": [
3090        "low",
3091        "medium",
3092        "high"
3093      ],
3094      "default_reasoning_effort": "medium",
3095      "supports_attachments": false,
3096      "options": {}
3097    },
3098    {
3099      "id": "x-ai/grok-3-mini-beta",
3100      "name": "xAI: Grok 3 Mini Beta",
3101      "cost_per_1m_in": 0.3,
3102      "cost_per_1m_out": 0.5,
3103      "cost_per_1m_in_cached": 0,
3104      "cost_per_1m_out_cached": 0.075,
3105      "context_window": 131072,
3106      "default_max_tokens": 13107,
3107      "can_reason": true,
3108      "reasoning_levels": [
3109        "low",
3110        "medium",
3111        "high"
3112      ],
3113      "default_reasoning_effort": "medium",
3114      "supports_attachments": false,
3115      "options": {}
3116    },
3117    {
3118      "id": "x-ai/grok-4",
3119      "name": "xAI: Grok 4",
3120      "cost_per_1m_in": 3,
3121      "cost_per_1m_out": 15,
3122      "cost_per_1m_in_cached": 0,
3123      "cost_per_1m_out_cached": 0.75,
3124      "context_window": 256000,
3125      "default_max_tokens": 25600,
3126      "can_reason": true,
3127      "reasoning_levels": [
3128        "low",
3129        "medium",
3130        "high"
3131      ],
3132      "default_reasoning_effort": "medium",
3133      "supports_attachments": true,
3134      "options": {}
3135    },
3136    {
3137      "id": "x-ai/grok-4-fast",
3138      "name": "xAI: Grok 4 Fast",
3139      "cost_per_1m_in": 0.2,
3140      "cost_per_1m_out": 0.5,
3141      "cost_per_1m_in_cached": 0,
3142      "cost_per_1m_out_cached": 0.05,
3143      "context_window": 2000000,
3144      "default_max_tokens": 15000,
3145      "can_reason": true,
3146      "reasoning_levels": [
3147        "low",
3148        "medium",
3149        "high"
3150      ],
3151      "default_reasoning_effort": "medium",
3152      "supports_attachments": true,
3153      "options": {}
3154    },
3155    {
3156      "id": "x-ai/grok-4.1-fast:free",
3157      "name": "xAI: Grok 4.1 Fast (free)",
3158      "cost_per_1m_in": 0,
3159      "cost_per_1m_out": 0,
3160      "cost_per_1m_in_cached": 0,
3161      "cost_per_1m_out_cached": 0,
3162      "context_window": 2000000,
3163      "default_max_tokens": 15000,
3164      "can_reason": true,
3165      "reasoning_levels": [
3166        "low",
3167        "medium",
3168        "high"
3169      ],
3170      "default_reasoning_effort": "medium",
3171      "supports_attachments": true,
3172      "options": {}
3173    },
3174    {
3175      "id": "x-ai/grok-code-fast-1",
3176      "name": "xAI: Grok Code Fast 1",
3177      "cost_per_1m_in": 0.2,
3178      "cost_per_1m_out": 1.5,
3179      "cost_per_1m_in_cached": 0,
3180      "cost_per_1m_out_cached": 0.02,
3181      "context_window": 256000,
3182      "default_max_tokens": 5000,
3183      "can_reason": true,
3184      "reasoning_levels": [
3185        "low",
3186        "medium",
3187        "high"
3188      ],
3189      "default_reasoning_effort": "medium",
3190      "supports_attachments": false,
3191      "options": {}
3192    }
3193  ],
3194  "default_headers": {
3195    "HTTP-Referer": "https://charm.land",
3196    "X-Title": "Crush"
3197  }
3198}