openrouter.json

{
  "name": "OpenRouter",
  "id": "openrouter",
  "api_key": "$OPENROUTER_API_KEY",
  "api_endpoint": "https://openrouter.ai/api/v1",
  "type": "openrouter",
  "default_large_model_id": "anthropic/claude-sonnet-4",
  "default_small_model_id": "anthropic/claude-3.5-haiku",
  "models": [
    {
      "id": "ai21/jamba-large-1.7",
      "name": "AI21: Jamba Large 1.7",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "ai21/jamba-mini-1.7",
      "name": "AI21: Jamba Mini 1.7",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "allenai/olmo-3-7b-instruct",
      "name": "AllenAI: Olmo 3 7B Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "amazon/nova-2-lite-v1",
      "name": "Amazon: Nova 2 Lite",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-2-lite-v1:free",
      "name": "Amazon: Nova 2 Lite (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-lite-v1",
      "name": "Amazon: Nova Lite 1.0",
      "cost_per_1m_in": 0.06,
      "cost_per_1m_out": 0.24,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-micro-v1",
      "name": "Amazon: Nova Micro 1.0",
      "cost_per_1m_in": 0.035,
      "cost_per_1m_out": 0.14,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "amazon/nova-premier-v1",
      "name": "Amazon: Nova Premier 1.0",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 12.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.625,
      "context_window": 1000000,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-pro-v1",
      "name": "Amazon: Nova Pro 1.0",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 3.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3-haiku",
      "name": "Anthropic: Claude 3 Haiku",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1.25,
      "cost_per_1m_in_cached": 0.3,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3-opus",
      "name": "Anthropic: Claude 3 Opus",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-haiku",
      "name": "Anthropic: Claude 3.5 Haiku",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-haiku-20241022",
      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-sonnet",
      "name": "Anthropic: Claude 3.5 Sonnet",
      "cost_per_1m_in": 6,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 7.5,
      "cost_per_1m_out_cached": 0.6,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.7-sonnet",
      "name": "Anthropic: Claude 3.7 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.7-sonnet:thinking",
      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-haiku-4.5",
      "name": "Anthropic: Claude Haiku 4.5",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 1.25,
      "cost_per_1m_out_cached": 0.1,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4",
      "name": "Anthropic: Claude Opus 4",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4.1",
      "name": "Anthropic: Claude Opus 4.1",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4.5",
      "name": "Anthropic: Claude Opus 4.5",
      "cost_per_1m_in": 5,
      "cost_per_1m_out": 25,
      "cost_per_1m_in_cached": 6.25,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-sonnet-4",
      "name": "Anthropic: Claude Sonnet 4",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-sonnet-4.5",
      "name": "Anthropic: Claude Sonnet 4.5",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "arcee-ai/trinity-mini",
      "name": "Arcee AI: Trinity Mini",
      "cost_per_1m_in": 0.045,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "arcee-ai/trinity-mini:free",
      "name": "Arcee AI: Trinity Mini (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "arcee-ai/virtuoso-large",
      "name": "Arcee AI: Virtuoso Large",
      "cost_per_1m_in": 0.75,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "baidu/ernie-4.5-21b-a3b",
      "name": "Baidu: ERNIE 4.5 21B A3B",
      "cost_per_1m_in": 0.056,
      "cost_per_1m_out": 0.224,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 120000,
      "default_max_tokens": 4000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "baidu/ernie-4.5-vl-28b-a3b",
      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
      "cost_per_1m_in": 0.112,
      "cost_per_1m_out": 0.448,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 30000,
      "default_max_tokens": 4000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
      "name": "Cogito V2 Preview Llama 109B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.59,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32767,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "cohere/command-r-08-2024",
      "name": "Cohere: Command R (08-2024)",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "cohere/command-r-plus-08-2024",
      "name": "Cohere: Command R+ (08-2024)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-405b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
      "cost_per_1m_in": 3.5,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-70b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
      "cost_per_1m_in": 0.88,
      "cost_per_1m_out": 0.88,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat",
      "name": "DeepSeek: DeepSeek V3",
      "cost_per_1m_in": 0.32,
      "cost_per_1m_out": 1.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 64000,
      "default_max_tokens": 8000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3.1",
      "name": "DeepSeek: DeepSeek V3.1",
      "cost_per_1m_in": 0.21,
      "cost_per_1m_out": 0.79,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.168,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus",
      "name": "DeepSeek: DeepSeek V3.1 Terminus",
      "cost_per_1m_in": 0.21,
      "cost_per_1m_out": 0.79,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.168,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus:exacto",
      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.2",
      "name": "DeepSeek: DeepSeek V3.2",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 0.41,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 8192,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.2-exp",
      "name": "DeepSeek: DeepSeek V3.2 Exp",
      "cost_per_1m_in": 0.21,
      "cost_per_1m_out": 0.32,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.168,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1",
      "name": "DeepSeek: R1",
      "cost_per_1m_in": 0.7,
      "cost_per_1m_out": 2.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1-0528",
      "name": "DeepSeek: R1 0528",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 1.75,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1-distill-llama-70b",
      "name": "DeepSeek: R1 Distill Llama 70B",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.13,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-001",
      "name": "Google: Gemini 2.0 Flash",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-exp:free",
      "name": "Google: Gemini 2.0 Flash Experimental (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-lite-001",
      "name": "Google: Gemini 2.0 Flash Lite",
      "cost_per_1m_in": 0.075,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash",
      "name": "Google: Gemini 2.5 Flash",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-lite",
      "name": "Google: Gemini 2.5 Flash Lite",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.01,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro",
      "name": "Google: Gemini 2.5 Pro",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro-preview-05-06",
      "name": "Google: Gemini 2.5 Pro Preview 05-06",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro-preview",
      "name": "Google: Gemini 2.5 Pro Preview 06-05",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-3-pro-preview",
      "name": "Google: Gemini 3 Pro Preview",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 12,
      "cost_per_1m_in_cached": 2.375,
      "cost_per_1m_out_cached": 0.2,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "inception/mercury",
      "name": "Inception: Mercury",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "inception/mercury-coder",
      "name": "Inception: Mercury Coder",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "kwaipilot/kat-coder-pro:free",
      "name": "Kwaipilot: KAT-Coder-Pro V1 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 16384,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meituan/longcat-flash-chat:free",
      "name": "Meituan: LongCat Flash Chat (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-405b-instruct",
      "name": "Meta: Llama 3.1 405B Instruct",
      "cost_per_1m_in": 3.5,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 130815,
      "default_max_tokens": 13081,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-70b-instruct",
      "name": "Meta: Llama 3.1 70B Instruct",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-8b-instruct",
      "name": "Meta: Llama 3.1 8B Instruct",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.09,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.2-3b-instruct",
      "name": "Meta: Llama 3.2 3B Instruct",
      "cost_per_1m_in": 0.024,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct",
      "name": "Meta: Llama 3.3 70B Instruct",
      "cost_per_1m_in": 0.13,
      "cost_per_1m_out": 0.38,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct:free",
      "name": "Meta: Llama 3.3 70B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-4-maverick",
      "name": "Meta: Llama 4 Maverick",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 0.85,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "meta-llama/llama-4-scout",
      "name": "Meta: Llama 4 Scout",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.7,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1310720,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "microsoft/phi-3-medium-128k-instruct",
      "name": "Microsoft: Phi-3 Medium 128K Instruct",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "microsoft/phi-3-mini-128k-instruct",
      "name": "Microsoft: Phi-3 Mini 128K Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "microsoft/phi-3.5-mini-128k-instruct",
      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "minimax/minimax-m2",
      "name": "MiniMax: MiniMax M2",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 204800,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large",
      "name": "Mistral Large",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2407",
      "name": "Mistral Large 2407",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2411",
      "name": "Mistral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-tiny",
      "name": "Mistral Tiny",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.25,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/codestral-2508",
      "name": "Mistral: Codestral 2508",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/devstral-medium",
      "name": "Mistral: Devstral Medium",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/devstral-small",
      "name": "Mistral: Devstral Small 1.1",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-medium-2506:thinking",
      "name": "Mistral: Magistral Medium 2506 (thinking)",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/ministral-14b-2512",
      "name": "Mistral: Ministral 3 14B 2512",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/ministral-3b-2512",
      "name": "Mistral: Ministral 3 3B 2512",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/ministral-8b-2512",
      "name": "Mistral: Ministral 3 8B 2512",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/ministral-3b",
      "name": "Mistral: Ministral 3B",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/ministral-8b",
      "name": "Mistral: Ministral 8B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-7b-instruct",
      "name": "Mistral: Mistral 7B Instruct",
      "cost_per_1m_in": 0.028,
      "cost_per_1m_out": 0.054,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-7b-instruct:free",
      "name": "Mistral: Mistral 7B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2512",
      "name": "Mistral: Mistral Large 3 2512",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-medium-3",
      "name": "Mistral: Mistral Medium 3",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-medium-3.1",
      "name": "Mistral: Mistral Medium 3.1",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-nemo",
      "name": "Mistral: Mistral Nemo",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-24b-instruct-2501",
      "name": "Mistral: Mistral Small 3",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 0.8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 1024,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct",
      "name": "Mistral: Mistral Small 3.1 24B",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.11,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
      "name": "Mistral: Mistral Small 3.1 24B (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.2-24b-instruct",
      "name": "Mistral: Mistral Small 3.2 24B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mixtral-8x22b-instruct",
      "name": "Mistral: Mixtral 8x22B Instruct",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 6553,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mixtral-8x7b-instruct",
      "name": "Mistral: Mixtral 8x7B Instruct",
      "cost_per_1m_in": 0.54,
      "cost_per_1m_out": 0.54,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/pixtral-12b",
      "name": "Mistral: Pixtral 12B",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/pixtral-large-2411",
      "name": "Mistral: Pixtral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-saba",
      "name": "Mistral: Saba",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/voxtral-small-24b-2507",
      "name": "Mistral: Voxtral Small 24B 2507",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32000,
      "default_max_tokens": 3200,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2",
      "name": "MoonshotAI: Kimi K2 0711",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-0905",
      "name": "MoonshotAI: Kimi K2 0905",
      "cost_per_1m_in": 0.39,
      "cost_per_1m_out": 1.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 131072,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-0905:exacto",
      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
      "cost_per_1m_in": 0.6,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-thinking",
      "name": "MoonshotAI: Kimi K2 Thinking",
      "cost_per_1m_in": 0.6,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.15,
      "context_window": 262144,
      "default_max_tokens": 131072,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
      "cost_per_1m_in": 1.2,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2",
      "name": "NVIDIA: Nemotron Nano 9B V2",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.16,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2:free",
      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/deephermes-3-mistral-24b-preview",
      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
      "cost_per_1m_in": 0.05,
      "cost_per_1m_out": 0.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/hermes-4-405b",
      "name": "Nous: Hermes 4 405B",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/hermes-4-70b",
      "name": "Nous: Hermes 4 70B",
      "cost_per_1m_in": 0.11,
      "cost_per_1m_out": 0.38,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/codex-mini",
      "name": "OpenAI: Codex Mini",
      "cost_per_1m_in": 1.5,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.375,
      "context_window": 200000,
      "default_max_tokens": 50000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4-turbo",
      "name": "OpenAI: GPT-4 Turbo",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4-1106-preview",
      "name": "OpenAI: GPT-4 Turbo (older v1106)",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4-turbo-preview",
      "name": "OpenAI: GPT-4 Turbo Preview",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1",
      "name": "OpenAI: GPT-4.1",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
1750    {
1751      "id": "openai/gpt-4.1-mini",
1752      "name": "OpenAI: GPT-4.1 Mini",
1753      "cost_per_1m_in": 0.39999999999999997,
1754      "cost_per_1m_out": 1.5999999999999999,
1755      "cost_per_1m_in_cached": 0,
1756      "cost_per_1m_out_cached": 0.09999999999999999,
1757      "context_window": 1047576,
1758      "default_max_tokens": 104757,
1759      "can_reason": false,
1760      "supports_attachments": true,
1761      "options": {}
1762    },
1763    {
1764      "id": "openai/gpt-4.1-nano",
1765      "name": "OpenAI: GPT-4.1 Nano",
1766      "cost_per_1m_in": 0.09999999999999999,
1767      "cost_per_1m_out": 0.39999999999999997,
1768      "cost_per_1m_in_cached": 0,
1769      "cost_per_1m_out_cached": 0.03,
1770      "context_window": 1047576,
1771      "default_max_tokens": 104757,
1772      "can_reason": false,
1773      "supports_attachments": true,
1774      "options": {}
1775    },
1776    {
1777      "id": "openai/gpt-4o",
1778      "name": "OpenAI: GPT-4o",
1779      "cost_per_1m_in": 2.5,
1780      "cost_per_1m_out": 10,
1781      "cost_per_1m_in_cached": 0,
1782      "cost_per_1m_out_cached": 0,
1783      "context_window": 128000,
1784      "default_max_tokens": 8192,
1785      "can_reason": false,
1786      "supports_attachments": true,
1787      "options": {}
1788    },
1789    {
1790      "id": "openai/gpt-4o-2024-05-13",
1791      "name": "OpenAI: GPT-4o (2024-05-13)",
1792      "cost_per_1m_in": 5,
1793      "cost_per_1m_out": 15,
1794      "cost_per_1m_in_cached": 0,
1795      "cost_per_1m_out_cached": 0,
1796      "context_window": 128000,
1797      "default_max_tokens": 2048,
1798      "can_reason": false,
1799      "supports_attachments": true,
1800      "options": {}
1801    },
1802    {
1803      "id": "openai/gpt-4o-2024-08-06",
1804      "name": "OpenAI: GPT-4o (2024-08-06)",
1805      "cost_per_1m_in": 2.5,
1806      "cost_per_1m_out": 10,
1807      "cost_per_1m_in_cached": 0,
1808      "cost_per_1m_out_cached": 1.25,
1809      "context_window": 128000,
1810      "default_max_tokens": 8192,
1811      "can_reason": false,
1812      "supports_attachments": true,
1813      "options": {}
1814    },
1815    {
1816      "id": "openai/gpt-4o-2024-11-20",
1817      "name": "OpenAI: GPT-4o (2024-11-20)",
1818      "cost_per_1m_in": 2.5,
1819      "cost_per_1m_out": 10,
1820      "cost_per_1m_in_cached": 0,
1821      "cost_per_1m_out_cached": 1.25,
1822      "context_window": 128000,
1823      "default_max_tokens": 8192,
1824      "can_reason": false,
1825      "supports_attachments": true,
1826      "options": {}
1827    },
1828    {
1829      "id": "openai/gpt-4o:extended",
1830      "name": "OpenAI: GPT-4o (extended)",
1831      "cost_per_1m_in": 6,
1832      "cost_per_1m_out": 18,
1833      "cost_per_1m_in_cached": 0,
1834      "cost_per_1m_out_cached": 0,
1835      "context_window": 128000,
1836      "default_max_tokens": 32000,
1837      "can_reason": false,
1838      "supports_attachments": true,
1839      "options": {}
1840    },
1841    {
1842      "id": "openai/gpt-4o-audio-preview",
1843      "name": "OpenAI: GPT-4o Audio",
1844      "cost_per_1m_in": 2.5,
1845      "cost_per_1m_out": 10,
1846      "cost_per_1m_in_cached": 0,
1847      "cost_per_1m_out_cached": 0,
1848      "context_window": 128000,
1849      "default_max_tokens": 8192,
1850      "can_reason": false,
1851      "supports_attachments": false,
1852      "options": {}
1853    },
1854    {
1855      "id": "openai/gpt-4o-mini",
1856      "name": "OpenAI: GPT-4o-mini",
1857      "cost_per_1m_in": 0.15,
1858      "cost_per_1m_out": 0.6,
1859      "cost_per_1m_in_cached": 0,
1860      "cost_per_1m_out_cached": 0.075,
1861      "context_window": 128000,
1862      "default_max_tokens": 8192,
1863      "can_reason": false,
1864      "supports_attachments": true,
1865      "options": {}
1866    },
1867    {
1868      "id": "openai/gpt-4o-mini-2024-07-18",
1869      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1870      "cost_per_1m_in": 0.15,
1871      "cost_per_1m_out": 0.6,
1872      "cost_per_1m_in_cached": 0,
1873      "cost_per_1m_out_cached": 0.075,
1874      "context_window": 128000,
1875      "default_max_tokens": 8192,
1876      "can_reason": false,
1877      "supports_attachments": true,
1878      "options": {}
1879    },
1880    {
1881      "id": "openai/gpt-5",
1882      "name": "OpenAI: GPT-5",
1883      "cost_per_1m_in": 1.25,
1884      "cost_per_1m_out": 10,
1885      "cost_per_1m_in_cached": 0,
1886      "cost_per_1m_out_cached": 0.125,
1887      "context_window": 400000,
1888      "default_max_tokens": 64000,
1889      "can_reason": true,
1890      "reasoning_levels": [
1891        "low",
1892        "medium",
1893        "high"
1894      ],
1895      "default_reasoning_effort": "medium",
1896      "supports_attachments": true,
1897      "options": {}
1898    },
1899    {
1900      "id": "openai/gpt-5-codex",
1901      "name": "OpenAI: GPT-5 Codex",
1902      "cost_per_1m_in": 1.25,
1903      "cost_per_1m_out": 10,
1904      "cost_per_1m_in_cached": 0,
1905      "cost_per_1m_out_cached": 0.125,
1906      "context_window": 400000,
1907      "default_max_tokens": 64000,
1908      "can_reason": true,
1909      "reasoning_levels": [
1910        "low",
1911        "medium",
1912        "high"
1913      ],
1914      "default_reasoning_effort": "medium",
1915      "supports_attachments": true,
1916      "options": {}
1917    },
1918    {
1919      "id": "openai/gpt-5-image",
1920      "name": "OpenAI: GPT-5 Image",
1921      "cost_per_1m_in": 10,
1922      "cost_per_1m_out": 10,
1923      "cost_per_1m_in_cached": 0,
1924      "cost_per_1m_out_cached": 1.25,
1925      "context_window": 400000,
1926      "default_max_tokens": 64000,
1927      "can_reason": true,
1928      "reasoning_levels": [
1929        "low",
1930        "medium",
1931        "high"
1932      ],
1933      "default_reasoning_effort": "medium",
1934      "supports_attachments": true,
1935      "options": {}
1936    },
1937    {
1938      "id": "openai/gpt-5-image-mini",
1939      "name": "OpenAI: GPT-5 Image Mini",
1940      "cost_per_1m_in": 2.5,
1941      "cost_per_1m_out": 2,
1942      "cost_per_1m_in_cached": 0,
1943      "cost_per_1m_out_cached": 0.25,
1944      "context_window": 400000,
1945      "default_max_tokens": 64000,
1946      "can_reason": true,
1947      "reasoning_levels": [
1948        "low",
1949        "medium",
1950        "high"
1951      ],
1952      "default_reasoning_effort": "medium",
1953      "supports_attachments": true,
1954      "options": {}
1955    },
1956    {
1957      "id": "openai/gpt-5-mini",
1958      "name": "OpenAI: GPT-5 Mini",
1959      "cost_per_1m_in": 0.25,
1960      "cost_per_1m_out": 2,
1961      "cost_per_1m_in_cached": 0,
1962      "cost_per_1m_out_cached": 0.03,
1963      "context_window": 400000,
1964      "default_max_tokens": 40000,
1965      "can_reason": true,
1966      "reasoning_levels": [
1967        "low",
1968        "medium",
1969        "high"
1970      ],
1971      "default_reasoning_effort": "medium",
1972      "supports_attachments": true,
1973      "options": {}
1974    },
1975    {
1976      "id": "openai/gpt-5-nano",
1977      "name": "OpenAI: GPT-5 Nano",
1978      "cost_per_1m_in": 0.049999999999999996,
1979      "cost_per_1m_out": 0.39999999999999997,
1980      "cost_per_1m_in_cached": 0,
1981      "cost_per_1m_out_cached": 0.005,
1982      "context_window": 400000,
1983      "default_max_tokens": 64000,
1984      "can_reason": true,
1985      "reasoning_levels": [
1986        "low",
1987        "medium",
1988        "high"
1989      ],
1990      "default_reasoning_effort": "medium",
1991      "supports_attachments": true,
1992      "options": {}
1993    },
1994    {
1995      "id": "openai/gpt-5-pro",
1996      "name": "OpenAI: GPT-5 Pro",
1997      "cost_per_1m_in": 15,
1998      "cost_per_1m_out": 120,
1999      "cost_per_1m_in_cached": 0,
2000      "cost_per_1m_out_cached": 0,
2001      "context_window": 400000,
2002      "default_max_tokens": 64000,
2003      "can_reason": true,
2004      "reasoning_levels": [
2005        "low",
2006        "medium",
2007        "high"
2008      ],
2009      "default_reasoning_effort": "medium",
2010      "supports_attachments": true,
2011      "options": {}
2012    },
2013    {
2014      "id": "openai/gpt-5.1",
2015      "name": "OpenAI: GPT-5.1",
2016      "cost_per_1m_in": 1.25,
2017      "cost_per_1m_out": 10,
2018      "cost_per_1m_in_cached": 0,
2019      "cost_per_1m_out_cached": 0.125,
2020      "context_window": 400000,
2021      "default_max_tokens": 64000,
2022      "can_reason": true,
2023      "reasoning_levels": [
2024        "low",
2025        "medium",
2026        "high"
2027      ],
2028      "default_reasoning_effort": "medium",
2029      "supports_attachments": true,
2030      "options": {}
2031    },
2032    {
2033      "id": "openai/gpt-5.1-chat",
2034      "name": "OpenAI: GPT-5.1 Chat",
2035      "cost_per_1m_in": 1.25,
2036      "cost_per_1m_out": 10,
2037      "cost_per_1m_in_cached": 0,
2038      "cost_per_1m_out_cached": 0.125,
2039      "context_window": 128000,
2040      "default_max_tokens": 8192,
2041      "can_reason": false,
2042      "supports_attachments": true,
2043      "options": {}
2044    },
2045    {
2046      "id": "openai/gpt-5.1-codex",
2047      "name": "OpenAI: GPT-5.1-Codex",
2048      "cost_per_1m_in": 1.25,
2049      "cost_per_1m_out": 10,
2050      "cost_per_1m_in_cached": 0,
2051      "cost_per_1m_out_cached": 0.125,
2052      "context_window": 400000,
2053      "default_max_tokens": 64000,
2054      "can_reason": true,
2055      "reasoning_levels": [
2056        "low",
2057        "medium",
2058        "high"
2059      ],
2060      "default_reasoning_effort": "medium",
2061      "supports_attachments": true,
2062      "options": {}
2063    },
2064    {
2065      "id": "openai/gpt-5.1-codex-mini",
2066      "name": "OpenAI: GPT-5.1-Codex-Mini",
2067      "cost_per_1m_in": 0.25,
2068      "cost_per_1m_out": 2,
2069      "cost_per_1m_in_cached": 0,
2070      "cost_per_1m_out_cached": 0.024999999999999998,
2071      "context_window": 400000,
2072      "default_max_tokens": 50000,
2073      "can_reason": true,
2074      "reasoning_levels": [
2075        "low",
2076        "medium",
2077        "high"
2078      ],
2079      "default_reasoning_effort": "medium",
2080      "supports_attachments": true,
2081      "options": {}
2082    },
2083    {
2084      "id": "openai/gpt-oss-120b",
2085      "name": "OpenAI: gpt-oss-120b",
2086      "cost_per_1m_in": 0.09999999999999999,
2087      "cost_per_1m_out": 0.49,
2088      "cost_per_1m_in_cached": 0,
2089      "cost_per_1m_out_cached": 0,
2090      "context_window": 131072,
2091      "default_max_tokens": 13107,
2092      "can_reason": true,
2093      "reasoning_levels": [
2094        "low",
2095        "medium",
2096        "high"
2097      ],
2098      "default_reasoning_effort": "medium",
2099      "supports_attachments": false,
2100      "options": {}
2101    },
2102    {
2103      "id": "openai/gpt-oss-120b:exacto",
2104      "name": "OpenAI: gpt-oss-120b (exacto)",
2105      "cost_per_1m_in": 0.04,
2106      "cost_per_1m_out": 0.19999999999999998,
2107      "cost_per_1m_in_cached": 0,
2108      "cost_per_1m_out_cached": 0,
2109      "context_window": 131072,
2110      "default_max_tokens": 16384,
2111      "can_reason": true,
2112      "reasoning_levels": [
2113        "low",
2114        "medium",
2115        "high"
2116      ],
2117      "default_reasoning_effort": "medium",
2118      "supports_attachments": false,
2119      "options": {}
2120    },
2121    {
2122      "id": "openai/gpt-oss-20b",
2123      "name": "OpenAI: gpt-oss-20b",
2124      "cost_per_1m_in": 0.04,
2125      "cost_per_1m_out": 0.15,
2126      "cost_per_1m_in_cached": 0,
2127      "cost_per_1m_out_cached": 0,
2128      "context_window": 131072,
2129      "default_max_tokens": 13107,
2130      "can_reason": true,
2131      "reasoning_levels": [
2132        "low",
2133        "medium",
2134        "high"
2135      ],
2136      "default_reasoning_effort": "medium",
2137      "supports_attachments": false,
2138      "options": {}
2139    },
2140    {
2141      "id": "openai/gpt-oss-20b:free",
2142      "name": "OpenAI: gpt-oss-20b (free)",
2143      "cost_per_1m_in": 0,
2144      "cost_per_1m_out": 0,
2145      "cost_per_1m_in_cached": 0,
2146      "cost_per_1m_out_cached": 0,
2147      "context_window": 131072,
2148      "default_max_tokens": 65536,
2149      "can_reason": true,
2150      "reasoning_levels": [
2151        "low",
2152        "medium",
2153        "high"
2154      ],
2155      "default_reasoning_effort": "medium",
2156      "supports_attachments": false,
2157      "options": {}
2158    },
2159    {
2160      "id": "openai/gpt-oss-safeguard-20b",
2161      "name": "OpenAI: gpt-oss-safeguard-20b",
2162      "cost_per_1m_in": 0.075,
2163      "cost_per_1m_out": 0.3,
2164      "cost_per_1m_in_cached": 0,
2165      "cost_per_1m_out_cached": 0.037,
2166      "context_window": 131072,
2167      "default_max_tokens": 32768,
2168      "can_reason": true,
2169      "reasoning_levels": [
2170        "low",
2171        "medium",
2172        "high"
2173      ],
2174      "default_reasoning_effort": "medium",
2175      "supports_attachments": false,
2176      "options": {}
2177    },
2178    {
2179      "id": "openai/o1",
2180      "name": "OpenAI: o1",
2181      "cost_per_1m_in": 15,
2182      "cost_per_1m_out": 60,
2183      "cost_per_1m_in_cached": 0,
2184      "cost_per_1m_out_cached": 7.5,
2185      "context_window": 200000,
2186      "default_max_tokens": 50000,
2187      "can_reason": false,
2188      "supports_attachments": true,
2189      "options": {}
2190    },
2191    {
2192      "id": "openai/o3",
2193      "name": "OpenAI: o3",
2194      "cost_per_1m_in": 2,
2195      "cost_per_1m_out": 8,
2196      "cost_per_1m_in_cached": 0,
2197      "cost_per_1m_out_cached": 0.5,
2198      "context_window": 200000,
2199      "default_max_tokens": 50000,
2200      "can_reason": true,
2201      "reasoning_levels": [
2202        "low",
2203        "medium",
2204        "high"
2205      ],
2206      "default_reasoning_effort": "medium",
2207      "supports_attachments": true,
2208      "options": {}
2209    },
2210    {
2211      "id": "openai/o3-deep-research",
2212      "name": "OpenAI: o3 Deep Research",
2213      "cost_per_1m_in": 10,
2214      "cost_per_1m_out": 40,
2215      "cost_per_1m_in_cached": 0,
2216      "cost_per_1m_out_cached": 2.5,
2217      "context_window": 200000,
2218      "default_max_tokens": 50000,
2219      "can_reason": true,
2220      "reasoning_levels": [
2221        "low",
2222        "medium",
2223        "high"
2224      ],
2225      "default_reasoning_effort": "medium",
2226      "supports_attachments": true,
2227      "options": {}
2228    },
2229    {
2230      "id": "openai/o3-mini",
2231      "name": "OpenAI: o3 Mini",
2232      "cost_per_1m_in": 1.1,
2233      "cost_per_1m_out": 4.4,
2234      "cost_per_1m_in_cached": 0,
2235      "cost_per_1m_out_cached": 0.55,
2236      "context_window": 200000,
2237      "default_max_tokens": 50000,
2238      "can_reason": false,
2239      "supports_attachments": false,
2240      "options": {}
2241    },
2242    {
2243      "id": "openai/o3-mini-high",
2244      "name": "OpenAI: o3 Mini High",
2245      "cost_per_1m_in": 1.1,
2246      "cost_per_1m_out": 4.4,
2247      "cost_per_1m_in_cached": 0,
2248      "cost_per_1m_out_cached": 0.55,
2249      "context_window": 200000,
2250      "default_max_tokens": 50000,
2251      "can_reason": false,
2252      "supports_attachments": false,
2253      "options": {}
2254    },
2255    {
2256      "id": "openai/o3-pro",
2257      "name": "OpenAI: o3 Pro",
2258      "cost_per_1m_in": 20,
2259      "cost_per_1m_out": 80,
2260      "cost_per_1m_in_cached": 0,
2261      "cost_per_1m_out_cached": 0,
2262      "context_window": 200000,
2263      "default_max_tokens": 50000,
2264      "can_reason": true,
2265      "reasoning_levels": [
2266        "low",
2267        "medium",
2268        "high"
2269      ],
2270      "default_reasoning_effort": "medium",
2271      "supports_attachments": true,
2272      "options": {}
2273    },
2274    {
2275      "id": "openai/o4-mini",
2276      "name": "OpenAI: o4 Mini",
2277      "cost_per_1m_in": 1.1,
2278      "cost_per_1m_out": 4.4,
2279      "cost_per_1m_in_cached": 0,
2280      "cost_per_1m_out_cached": 0.275,
2281      "context_window": 200000,
2282      "default_max_tokens": 50000,
2283      "can_reason": true,
2284      "reasoning_levels": [
2285        "low",
2286        "medium",
2287        "high"
2288      ],
2289      "default_reasoning_effort": "medium",
2290      "supports_attachments": true,
2291      "options": {}
2292    },
2293    {
2294      "id": "openai/o4-mini-deep-research",
2295      "name": "OpenAI: o4 Mini Deep Research",
2296      "cost_per_1m_in": 2,
2297      "cost_per_1m_out": 8,
2298      "cost_per_1m_in_cached": 0,
2299      "cost_per_1m_out_cached": 0.5,
2300      "context_window": 200000,
2301      "default_max_tokens": 50000,
2302      "can_reason": true,
2303      "reasoning_levels": [
2304        "low",
2305        "medium",
2306        "high"
2307      ],
2308      "default_reasoning_effort": "medium",
2309      "supports_attachments": true,
2310      "options": {}
2311    },
2312    {
2313      "id": "openai/o4-mini-high",
2314      "name": "OpenAI: o4 Mini High",
2315      "cost_per_1m_in": 1.1,
2316      "cost_per_1m_out": 4.4,
2317      "cost_per_1m_in_cached": 0,
2318      "cost_per_1m_out_cached": 0.275,
2319      "context_window": 200000,
2320      "default_max_tokens": 50000,
2321      "can_reason": true,
2322      "reasoning_levels": [
2323        "low",
2324        "medium",
2325        "high"
2326      ],
2327      "default_reasoning_effort": "medium",
2328      "supports_attachments": true,
2329      "options": {}
2330    },
2331    {
2332      "id": "prime-intellect/intellect-3",
2333      "name": "Prime Intellect: INTELLECT-3",
2334      "cost_per_1m_in": 0.19999999999999998,
2335      "cost_per_1m_out": 1.1,
2336      "cost_per_1m_in_cached": 0,
2337      "cost_per_1m_out_cached": 0,
2338      "context_window": 131072,
2339      "default_max_tokens": 65536,
2340      "can_reason": true,
2341      "reasoning_levels": [
2342        "low",
2343        "medium",
2344        "high"
2345      ],
2346      "default_reasoning_effort": "medium",
2347      "supports_attachments": false,
2348      "options": {}
2349    },
2350    {
2351      "id": "qwen/qwen-2.5-72b-instruct",
2352      "name": "Qwen2.5 72B Instruct",
2353      "cost_per_1m_in": 0.07,
2354      "cost_per_1m_out": 0.26,
2355      "cost_per_1m_in_cached": 0,
2356      "cost_per_1m_out_cached": 0,
2357      "context_window": 32768,
2358      "default_max_tokens": 16384,
2359      "can_reason": false,
2360      "supports_attachments": false,
2361      "options": {}
2362    },
2363    {
2364      "id": "qwen/qwen-plus-2025-07-28",
2365      "name": "Qwen: Qwen Plus 0728",
2366      "cost_per_1m_in": 0.39999999999999997,
2367      "cost_per_1m_out": 1.2,
2368      "cost_per_1m_in_cached": 0,
2369      "cost_per_1m_out_cached": 0,
2370      "context_window": 1000000,
2371      "default_max_tokens": 16384,
2372      "can_reason": false,
2373      "supports_attachments": false,
2374      "options": {}
2375    },
2376    {
2377      "id": "qwen/qwen-plus-2025-07-28:thinking",
2378      "name": "Qwen: Qwen Plus 0728 (thinking)",
2379      "cost_per_1m_in": 0.39999999999999997,
2380      "cost_per_1m_out": 4,
2381      "cost_per_1m_in_cached": 0,
2382      "cost_per_1m_out_cached": 0,
2383      "context_window": 1000000,
2384      "default_max_tokens": 16384,
2385      "can_reason": true,
2386      "reasoning_levels": [
2387        "low",
2388        "medium",
2389        "high"
2390      ],
2391      "default_reasoning_effort": "medium",
2392      "supports_attachments": false,
2393      "options": {}
2394    },
2395    {
2396      "id": "qwen/qwen-vl-max",
2397      "name": "Qwen: Qwen VL Max",
2398      "cost_per_1m_in": 0.7999999999999999,
2399      "cost_per_1m_out": 3.1999999999999997,
2400      "cost_per_1m_in_cached": 0,
2401      "cost_per_1m_out_cached": 0,
2402      "context_window": 131072,
2403      "default_max_tokens": 4096,
2404      "can_reason": false,
2405      "supports_attachments": true,
2406      "options": {}
2407    },
2408    {
2409      "id": "qwen/qwen-max",
2410      "name": "Qwen: Qwen-Max ",
2411      "cost_per_1m_in": 1.5999999999999999,
2412      "cost_per_1m_out": 6.3999999999999995,
2413      "cost_per_1m_in_cached": 0,
2414      "cost_per_1m_out_cached": 0.64,
2415      "context_window": 32768,
2416      "default_max_tokens": 4096,
2417      "can_reason": false,
2418      "supports_attachments": false,
2419      "options": {}
2420    },
2421    {
2422      "id": "qwen/qwen-plus",
2423      "name": "Qwen: Qwen-Plus",
2424      "cost_per_1m_in": 0.39999999999999997,
2425      "cost_per_1m_out": 1.2,
2426      "cost_per_1m_in_cached": 0,
2427      "cost_per_1m_out_cached": 0.16,
2428      "context_window": 131072,
2429      "default_max_tokens": 4096,
2430      "can_reason": false,
2431      "supports_attachments": false,
2432      "options": {}
2433    },
2434    {
2435      "id": "qwen/qwen-turbo",
2436      "name": "Qwen: Qwen-Turbo",
2437      "cost_per_1m_in": 0.049999999999999996,
2438      "cost_per_1m_out": 0.19999999999999998,
2439      "cost_per_1m_in_cached": 0,
2440      "cost_per_1m_out_cached": 0.02,
2441      "context_window": 1000000,
2442      "default_max_tokens": 4096,
2443      "can_reason": false,
2444      "supports_attachments": false,
2445      "options": {}
2446    },
2447    {
2448      "id": "qwen/qwen3-14b",
2449      "name": "Qwen: Qwen3 14B",
2450      "cost_per_1m_in": 0.049999999999999996,
2451      "cost_per_1m_out": 0.22,
2452      "cost_per_1m_in_cached": 0,
2453      "cost_per_1m_out_cached": 0,
2454      "context_window": 40960,
2455      "default_max_tokens": 20480,
2456      "can_reason": true,
2457      "reasoning_levels": [
2458        "low",
2459        "medium",
2460        "high"
2461      ],
2462      "default_reasoning_effort": "medium",
2463      "supports_attachments": false,
2464      "options": {}
2465    },
2466    {
2467      "id": "qwen/qwen3-235b-a22b",
2468      "name": "Qwen: Qwen3 235B A22B",
2469      "cost_per_1m_in": 0.22,
2470      "cost_per_1m_out": 0.88,
2471      "cost_per_1m_in_cached": 0,
2472      "cost_per_1m_out_cached": 0,
2473      "context_window": 131072,
2474      "default_max_tokens": 13107,
2475      "can_reason": true,
2476      "reasoning_levels": [
2477        "low",
2478        "medium",
2479        "high"
2480      ],
2481      "default_reasoning_effort": "medium",
2482      "supports_attachments": false,
2483      "options": {}
2484    },
2485    {
2486      "id": "qwen/qwen3-235b-a22b:free",
2487      "name": "Qwen: Qwen3 235B A22B (free)",
2488      "cost_per_1m_in": 0,
2489      "cost_per_1m_out": 0,
2490      "cost_per_1m_in_cached": 0,
2491      "cost_per_1m_out_cached": 0,
2492      "context_window": 131072,
2493      "default_max_tokens": 13107,
2494      "can_reason": true,
2495      "reasoning_levels": [
2496        "low",
2497        "medium",
2498        "high"
2499      ],
2500      "default_reasoning_effort": "medium",
2501      "supports_attachments": false,
2502      "options": {}
2503    },
2504    {
2505      "id": "qwen/qwen3-235b-a22b-2507",
2506      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2507      "cost_per_1m_in": 0.25,
2508      "cost_per_1m_out": 1,
2509      "cost_per_1m_in_cached": 0,
2510      "cost_per_1m_out_cached": 0,
2511      "context_window": 262144,
2512      "default_max_tokens": 8192,
2513      "can_reason": false,
2514      "supports_attachments": false,
2515      "options": {}
2516    },
2517    {
2518      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2519      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2520      "cost_per_1m_in": 0.13,
2521      "cost_per_1m_out": 0.6,
2522      "cost_per_1m_in_cached": 0,
2523      "cost_per_1m_out_cached": 0,
2524      "context_window": 262144,
2525      "default_max_tokens": 131072,
2526      "can_reason": true,
2527      "reasoning_levels": [
2528        "low",
2529        "medium",
2530        "high"
2531      ],
2532      "default_reasoning_effort": "medium",
2533      "supports_attachments": false,
2534      "options": {}
2535    },
2536    {
2537      "id": "qwen/qwen3-30b-a3b",
2538      "name": "Qwen: Qwen3 30B A3B",
2539      "cost_per_1m_in": 0.08,
2540      "cost_per_1m_out": 0.28,
2541      "cost_per_1m_in_cached": 0,
2542      "cost_per_1m_out_cached": 0,
2543      "context_window": 131072,
2544      "default_max_tokens": 65536,
2545      "can_reason": true,
2546      "reasoning_levels": [
2547        "low",
2548        "medium",
2549        "high"
2550      ],
2551      "default_reasoning_effort": "medium",
2552      "supports_attachments": false,
2553      "options": {}
2554    },
2555    {
2556      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2557      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2558      "cost_per_1m_in": 0.08,
2559      "cost_per_1m_out": 0.33,
2560      "cost_per_1m_in_cached": 0,
2561      "cost_per_1m_out_cached": 0,
2562      "context_window": 262144,
2563      "default_max_tokens": 131072,
2564      "can_reason": false,
2565      "supports_attachments": false,
2566      "options": {}
2567    },
2568    {
2569      "id": "qwen/qwen3-32b",
2570      "name": "Qwen: Qwen3 32B",
2571      "cost_per_1m_in": 0.15,
2572      "cost_per_1m_out": 0.5,
2573      "cost_per_1m_in_cached": 0,
2574      "cost_per_1m_out_cached": 0,
2575      "context_window": 131072,
2576      "default_max_tokens": 4000,
2577      "can_reason": true,
2578      "reasoning_levels": [
2579        "low",
2580        "medium",
2581        "high"
2582      ],
2583      "default_reasoning_effort": "medium",
2584      "supports_attachments": false,
2585      "options": {}
2586    },
2587    {
2588      "id": "qwen/qwen3-4b:free",
2589      "name": "Qwen: Qwen3 4B (free)",
2590      "cost_per_1m_in": 0,
2591      "cost_per_1m_out": 0,
2592      "cost_per_1m_in_cached": 0,
2593      "cost_per_1m_out_cached": 0,
2594      "context_window": 40960,
2595      "default_max_tokens": 4096,
2596      "can_reason": true,
2597      "reasoning_levels": [
2598        "low",
2599        "medium",
2600        "high"
2601      ],
2602      "default_reasoning_effort": "medium",
2603      "supports_attachments": false,
2604      "options": {}
2605    },
2606    {
2607      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2608      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2609      "cost_per_1m_in": 0.06,
2610      "cost_per_1m_out": 0.25,
2611      "cost_per_1m_in_cached": 0,
2612      "cost_per_1m_out_cached": 0,
2613      "context_window": 262144,
2614      "default_max_tokens": 131072,
2615      "can_reason": false,
2616      "supports_attachments": false,
2617      "options": {}
2618    },
2619    {
2620      "id": "qwen/qwen3-coder",
2621      "name": "Qwen: Qwen3 Coder 480B A35B",
2622      "cost_per_1m_in": 1,
2623      "cost_per_1m_out": 1.5,
2624      "cost_per_1m_in_cached": 0,
2625      "cost_per_1m_out_cached": 0,
2626      "context_window": 262144,
2627      "default_max_tokens": 131072,
2628      "can_reason": false,
2629      "supports_attachments": false,
2630      "options": {}
2631    },
2632    {
2633      "id": "qwen/qwen3-coder:exacto",
2634      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2635      "cost_per_1m_in": 0.38,
2636      "cost_per_1m_out": 1.53,
2637      "cost_per_1m_in_cached": 0,
2638      "cost_per_1m_out_cached": 0,
2639      "context_window": 262144,
2640      "default_max_tokens": 131072,
2641      "can_reason": true,
2642      "reasoning_levels": [
2643        "low",
2644        "medium",
2645        "high"
2646      ],
2647      "default_reasoning_effort": "medium",
2648      "supports_attachments": false,
2649      "options": {}
2650    },
2651    {
2652      "id": "qwen/qwen3-coder:free",
2653      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2654      "cost_per_1m_in": 0,
2655      "cost_per_1m_out": 0,
2656      "cost_per_1m_in_cached": 0,
2657      "cost_per_1m_out_cached": 0,
2658      "context_window": 262000,
2659      "default_max_tokens": 131000,
2660      "can_reason": false,
2661      "supports_attachments": false,
2662      "options": {}
2663    },
2664    {
2665      "id": "qwen/qwen3-coder-flash",
2666      "name": "Qwen: Qwen3 Coder Flash",
2667      "cost_per_1m_in": 0.3,
2668      "cost_per_1m_out": 1.5,
2669      "cost_per_1m_in_cached": 0,
2670      "cost_per_1m_out_cached": 0.08,
2671      "context_window": 128000,
2672      "default_max_tokens": 32768,
2673      "can_reason": false,
2674      "supports_attachments": false,
2675      "options": {}
2676    },
2677    {
2678      "id": "qwen/qwen3-coder-plus",
2679      "name": "Qwen: Qwen3 Coder Plus",
2680      "cost_per_1m_in": 1,
2681      "cost_per_1m_out": 5,
2682      "cost_per_1m_in_cached": 0,
2683      "cost_per_1m_out_cached": 0.09999999999999999,
2684      "context_window": 128000,
2685      "default_max_tokens": 32768,
2686      "can_reason": false,
2687      "supports_attachments": false,
2688      "options": {}
2689    },
2690    {
2691      "id": "qwen/qwen3-max",
2692      "name": "Qwen: Qwen3 Max",
2693      "cost_per_1m_in": 1.2,
2694      "cost_per_1m_out": 6,
2695      "cost_per_1m_in_cached": 0,
2696      "cost_per_1m_out_cached": 0.24,
2697      "context_window": 256000,
2698      "default_max_tokens": 16384,
2699      "can_reason": false,
2700      "supports_attachments": false,
2701      "options": {}
2702    },
2703    {
2704      "id": "qwen/qwen3-next-80b-a3b-instruct",
2705      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2706      "cost_per_1m_in": 0.15,
2707      "cost_per_1m_out": 1.2,
2708      "cost_per_1m_in_cached": 0,
2709      "cost_per_1m_out_cached": 0,
2710      "context_window": 262144,
2711      "default_max_tokens": 131072,
2712      "can_reason": false,
2713      "supports_attachments": false,
2714      "options": {}
2715    },
2716    {
2717      "id": "qwen/qwen3-next-80b-a3b-thinking",
2718      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2719      "cost_per_1m_in": 0.15,
2720      "cost_per_1m_out": 1.2,
2721      "cost_per_1m_in_cached": 0,
2722      "cost_per_1m_out_cached": 0,
2723      "context_window": 262144,
2724      "default_max_tokens": 131072,
2725      "can_reason": true,
2726      "reasoning_levels": [
2727        "low",
2728        "medium",
2729        "high"
2730      ],
2731      "default_reasoning_effort": "medium",
2732      "supports_attachments": false,
2733      "options": {}
2734    },
2735    {
2736      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2737      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2738      "cost_per_1m_in": 0.21,
2739      "cost_per_1m_out": 1.9,
2740      "cost_per_1m_in_cached": 0,
2741      "cost_per_1m_out_cached": 0,
2742      "context_window": 131072,
2743      "default_max_tokens": 16384,
2744      "can_reason": false,
2745      "supports_attachments": true,
2746      "options": {}
2747    },
2748    {
2749      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2750      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2751      "cost_per_1m_in": 0.3,
2752      "cost_per_1m_out": 1.2,
2753      "cost_per_1m_in_cached": 0,
2754      "cost_per_1m_out_cached": 0,
2755      "context_window": 262144,
2756      "default_max_tokens": 131072,
2757      "can_reason": true,
2758      "reasoning_levels": [
2759        "low",
2760        "medium",
2761        "high"
2762      ],
2763      "default_reasoning_effort": "medium",
2764      "supports_attachments": true,
2765      "options": {}
2766    },
2767    {
2768      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2769      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2770      "cost_per_1m_in": 0.15,
2771      "cost_per_1m_out": 0.6,
2772      "cost_per_1m_in_cached": 0,
2773      "cost_per_1m_out_cached": 0,
2774      "context_window": 262144,
2775      "default_max_tokens": 26214,
2776      "can_reason": false,
2777      "supports_attachments": true,
2778      "options": {}
2779    },
2780    {
2781      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2782      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2783      "cost_per_1m_in": 0.16,
2784      "cost_per_1m_out": 0.7999999999999999,
2785      "cost_per_1m_in_cached": 0,
2786      "cost_per_1m_out_cached": 0,
2787      "context_window": 131072,
2788      "default_max_tokens": 16384,
2789      "can_reason": true,
2790      "reasoning_levels": [
2791        "low",
2792        "medium",
2793        "high"
2794      ],
2795      "default_reasoning_effort": "medium",
2796      "supports_attachments": true,
2797      "options": {}
2798    },
2799    {
2800      "id": "qwen/qwen3-vl-8b-instruct",
2801      "name": "Qwen: Qwen3 VL 8B Instruct",
2802      "cost_per_1m_in": 0.18,
2803      "cost_per_1m_out": 0.7,
2804      "cost_per_1m_in_cached": 0,
2805      "cost_per_1m_out_cached": 0,
2806      "context_window": 256000,
2807      "default_max_tokens": 16384,
2808      "can_reason": false,
2809      "supports_attachments": true,
2810      "options": {}
2811    },
2812    {
2813      "id": "qwen/qwen3-vl-8b-thinking",
2814      "name": "Qwen: Qwen3 VL 8B Thinking",
2815      "cost_per_1m_in": 0.18,
2816      "cost_per_1m_out": 2.0999999999999996,
2817      "cost_per_1m_in_cached": 0,
2818      "cost_per_1m_out_cached": 0,
2819      "context_window": 256000,
2820      "default_max_tokens": 16384,
2821      "can_reason": true,
2822      "reasoning_levels": [
2823        "low",
2824        "medium",
2825        "high"
2826      ],
2827      "default_reasoning_effort": "medium",
2828      "supports_attachments": true,
2829      "options": {}
2830    },
2831    {
2832      "id": "stepfun-ai/step3",
2833      "name": "StepFun: Step3",
2834      "cost_per_1m_in": 0.5700000000000001,
2835      "cost_per_1m_out": 1.42,
2836      "cost_per_1m_in_cached": 0,
2837      "cost_per_1m_out_cached": 0,
2838      "context_window": 65536,
2839      "default_max_tokens": 32768,
2840      "can_reason": true,
2841      "reasoning_levels": [
2842        "low",
2843        "medium",
2844        "high"
2845      ],
2846      "default_reasoning_effort": "medium",
2847      "supports_attachments": true,
2848      "options": {}
2849    },
2850    {
2851      "id": "tngtech/deepseek-r1t2-chimera",
2852      "name": "TNG: DeepSeek R1T2 Chimera",
2853      "cost_per_1m_in": 0.3,
2854      "cost_per_1m_out": 1.2,
2855      "cost_per_1m_in_cached": 0,
2856      "cost_per_1m_out_cached": 0,
2857      "context_window": 163840,
2858      "default_max_tokens": 81920,
2859      "can_reason": true,
2860      "reasoning_levels": [
2861        "low",
2862        "medium",
2863        "high"
2864      ],
2865      "default_reasoning_effort": "medium",
2866      "supports_attachments": false,
2867      "options": {}
2868    },
2869    {
2870      "id": "tngtech/tng-r1t-chimera",
2871      "name": "TNG: R1T Chimera",
2872      "cost_per_1m_in": 0.3,
2873      "cost_per_1m_out": 1.2,
2874      "cost_per_1m_in_cached": 0,
2875      "cost_per_1m_out_cached": 0,
2876      "context_window": 163840,
2877      "default_max_tokens": 81920,
2878      "can_reason": true,
2879      "reasoning_levels": [
2880        "low",
2881        "medium",
2882        "high"
2883      ],
2884      "default_reasoning_effort": "medium",
2885      "supports_attachments": false,
2886      "options": {}
2887    },
2888    {
2889      "id": "tngtech/tng-r1t-chimera:free",
2890      "name": "TNG: R1T Chimera (free)",
2891      "cost_per_1m_in": 0,
2892      "cost_per_1m_out": 0,
2893      "cost_per_1m_in_cached": 0,
2894      "cost_per_1m_out_cached": 0,
2895      "context_window": 163840,
2896      "default_max_tokens": 81920,
2897      "can_reason": true,
2898      "reasoning_levels": [
2899        "low",
2900        "medium",
2901        "high"
2902      ],
2903      "default_reasoning_effort": "medium",
2904      "supports_attachments": false,
2905      "options": {}
2906    },
2907    {
2908      "id": "thedrummer/rocinante-12b",
2909      "name": "TheDrummer: Rocinante 12B",
2910      "cost_per_1m_in": 0.16999999999999998,
2911      "cost_per_1m_out": 0.43,
2912      "cost_per_1m_in_cached": 0,
2913      "cost_per_1m_out_cached": 0,
2914      "context_window": 32768,
2915      "default_max_tokens": 3276,
2916      "can_reason": false,
2917      "supports_attachments": false,
2918      "options": {}
2919    },
2920    {
2921      "id": "thedrummer/unslopnemo-12b",
2922      "name": "TheDrummer: UnslopNemo 12B",
2923      "cost_per_1m_in": 0.39999999999999997,
2924      "cost_per_1m_out": 0.39999999999999997,
2925      "cost_per_1m_in_cached": 0,
2926      "cost_per_1m_out_cached": 0,
2927      "context_window": 32768,
2928      "default_max_tokens": 3276,
2929      "can_reason": false,
2930      "supports_attachments": false,
2931      "options": {}
2932    },
2933    {
2934      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2935      "name": "Tongyi DeepResearch 30B A3B",
2936      "cost_per_1m_in": 0.09,
2937      "cost_per_1m_out": 0.39999999999999997,
2938      "cost_per_1m_in_cached": 0,
2939      "cost_per_1m_out_cached": 0,
2940      "context_window": 131072,
2941      "default_max_tokens": 65536,
2942      "can_reason": true,
2943      "reasoning_levels": [
2944        "low",
2945        "medium",
2946        "high"
2947      ],
2948      "default_reasoning_effort": "medium",
2949      "supports_attachments": false,
2950      "options": {}
2951    },
2952    {
2953      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2954      "name": "Tongyi DeepResearch 30B A3B (free)",
2955      "cost_per_1m_in": 0,
2956      "cost_per_1m_out": 0,
2957      "cost_per_1m_in_cached": 0,
2958      "cost_per_1m_out_cached": 0,
2959      "context_window": 131072,
2960      "default_max_tokens": 65536,
2961      "can_reason": true,
2962      "reasoning_levels": [
2963        "low",
2964        "medium",
2965        "high"
2966      ],
2967      "default_reasoning_effort": "medium",
2968      "supports_attachments": false,
2969      "options": {}
2970    },
2971    {
2972      "id": "z-ai/glm-4-32b",
2973      "name": "Z.AI: GLM 4 32B ",
2974      "cost_per_1m_in": 0.09999999999999999,
2975      "cost_per_1m_out": 0.09999999999999999,
2976      "cost_per_1m_in_cached": 0,
2977      "cost_per_1m_out_cached": 0,
2978      "context_window": 128000,
2979      "default_max_tokens": 12800,
2980      "can_reason": false,
2981      "supports_attachments": false,
2982      "options": {}
2983    },
2984    {
2985      "id": "z-ai/glm-4.5",
2986      "name": "Z.AI: GLM 4.5",
2987      "cost_per_1m_in": 0.48,
2988      "cost_per_1m_out": 1.76,
2989      "cost_per_1m_in_cached": 0,
2990      "cost_per_1m_out_cached": 0.088,
2991      "context_window": 131072,
2992      "default_max_tokens": 49152,
2993      "can_reason": true,
2994      "reasoning_levels": [
2995        "low",
2996        "medium",
2997        "high"
2998      ],
2999      "default_reasoning_effort": "medium",
3000      "supports_attachments": false,
3001      "options": {}
3002    },
3003    {
3004      "id": "z-ai/glm-4.5-air",
3005      "name": "Z.AI: GLM 4.5 Air",
3006      "cost_per_1m_in": 0.10400000000000001,
3007      "cost_per_1m_out": 0.6799999999999999,
3008      "cost_per_1m_in_cached": 0,
3009      "cost_per_1m_out_cached": 0,
3010      "context_window": 131072,
3011      "default_max_tokens": 49152,
3012      "can_reason": true,
3013      "reasoning_levels": [
3014        "low",
3015        "medium",
3016        "high"
3017      ],
3018      "default_reasoning_effort": "medium",
3019      "supports_attachments": false,
3020      "options": {}
3021    },
3022    {
3023      "id": "z-ai/glm-4.5-air:free",
3024      "name": "Z.AI: GLM 4.5 Air (free)",
3025      "cost_per_1m_in": 0,
3026      "cost_per_1m_out": 0,
3027      "cost_per_1m_in_cached": 0,
3028      "cost_per_1m_out_cached": 0,
3029      "context_window": 131072,
3030      "default_max_tokens": 48000,
3031      "can_reason": true,
3032      "reasoning_levels": [
3033        "low",
3034        "medium",
3035        "high"
3036      ],
3037      "default_reasoning_effort": "medium",
3038      "supports_attachments": false,
3039      "options": {}
3040    },
3041    {
3042      "id": "z-ai/glm-4.5v",
3043      "name": "Z.AI: GLM 4.5V",
3044      "cost_per_1m_in": 0.48,
3045      "cost_per_1m_out": 1.44,
3046      "cost_per_1m_in_cached": 0,
3047      "cost_per_1m_out_cached": 0.088,
3048      "context_window": 65536,
3049      "default_max_tokens": 8192,
3050      "can_reason": true,
3051      "reasoning_levels": [
3052        "low",
3053        "medium",
3054        "high"
3055      ],
3056      "default_reasoning_effort": "medium",
3057      "supports_attachments": true,
3058      "options": {}
3059    },
3060    {
3061      "id": "z-ai/glm-4.6",
3062      "name": "Z.AI: GLM 4.6",
3063      "cost_per_1m_in": 0.44,
3064      "cost_per_1m_out": 1.76,
3065      "cost_per_1m_in_cached": 0,
3066      "cost_per_1m_out_cached": 0.088,
3067      "context_window": 204800,
3068      "default_max_tokens": 65536,
3069      "can_reason": true,
3070      "reasoning_levels": [
3071        "low",
3072        "medium",
3073        "high"
3074      ],
3075      "default_reasoning_effort": "medium",
3076      "supports_attachments": false,
3077      "options": {}
3078    },
3079    {
3080      "id": "z-ai/glm-4.6:exacto",
3081      "name": "Z.AI: GLM 4.6 (exacto)",
3082      "cost_per_1m_in": 0.6,
3083      "cost_per_1m_out": 2.2,
3084      "cost_per_1m_in_cached": 0,
3085      "cost_per_1m_out_cached": 0,
3086      "context_window": 200000,
3087      "default_max_tokens": 64000,
3088      "can_reason": true,
3089      "reasoning_levels": [
3090        "low",
3091        "medium",
3092        "high"
3093      ],
3094      "default_reasoning_effort": "medium",
3095      "supports_attachments": false,
3096      "options": {}
3097    },
3098    {
3099      "id": "x-ai/grok-3",
3100      "name": "xAI: Grok 3",
3101      "cost_per_1m_in": 3,
3102      "cost_per_1m_out": 15,
3103      "cost_per_1m_in_cached": 0,
3104      "cost_per_1m_out_cached": 0.75,
3105      "context_window": 131072,
3106      "default_max_tokens": 13107,
3107      "can_reason": false,
3108      "supports_attachments": false,
3109      "options": {}
3110    },
3111    {
3112      "id": "x-ai/grok-3-beta",
3113      "name": "xAI: Grok 3 Beta",
3114      "cost_per_1m_in": 3,
3115      "cost_per_1m_out": 15,
3116      "cost_per_1m_in_cached": 0,
3117      "cost_per_1m_out_cached": 0.75,
3118      "context_window": 131072,
3119      "default_max_tokens": 13107,
3120      "can_reason": false,
3121      "supports_attachments": false,
3122      "options": {}
3123    },
3124    {
3125      "id": "x-ai/grok-3-mini",
3126      "name": "xAI: Grok 3 Mini",
3127      "cost_per_1m_in": 0.6,
3128      "cost_per_1m_out": 4,
3129      "cost_per_1m_in_cached": 0,
3130      "cost_per_1m_out_cached": 0.15,
3131      "context_window": 131072,
3132      "default_max_tokens": 13107,
3133      "can_reason": true,
3134      "reasoning_levels": [
3135        "low",
3136        "medium",
3137        "high"
3138      ],
3139      "default_reasoning_effort": "medium",
3140      "supports_attachments": false,
3141      "options": {}
3142    },
3143    {
3144      "id": "x-ai/grok-3-mini-beta",
3145      "name": "xAI: Grok 3 Mini Beta",
3146      "cost_per_1m_in": 0.6,
3147      "cost_per_1m_out": 4,
3148      "cost_per_1m_in_cached": 0,
3149      "cost_per_1m_out_cached": 0.15,
3150      "context_window": 131072,
3151      "default_max_tokens": 13107,
3152      "can_reason": true,
3153      "reasoning_levels": [
3154        "low",
3155        "medium",
3156        "high"
3157      ],
3158      "default_reasoning_effort": "medium",
3159      "supports_attachments": false,
3160      "options": {}
3161    },
3162    {
3163      "id": "x-ai/grok-4",
3164      "name": "xAI: Grok 4",
3165      "cost_per_1m_in": 3,
3166      "cost_per_1m_out": 15,
3167      "cost_per_1m_in_cached": 0,
3168      "cost_per_1m_out_cached": 0.75,
3169      "context_window": 256000,
3170      "default_max_tokens": 25600,
3171      "can_reason": true,
3172      "reasoning_levels": [
3173        "low",
3174        "medium",
3175        "high"
3176      ],
3177      "default_reasoning_effort": "medium",
3178      "supports_attachments": true,
3179      "options": {}
3180    },
3181    {
3182      "id": "x-ai/grok-4-fast",
3183      "name": "xAI: Grok 4 Fast",
3184      "cost_per_1m_in": 0.19999999999999998,
3185      "cost_per_1m_out": 0.5,
3186      "cost_per_1m_in_cached": 0,
3187      "cost_per_1m_out_cached": 0.049999999999999996,
3188      "context_window": 2000000,
3189      "default_max_tokens": 15000,
3190      "can_reason": true,
3191      "reasoning_levels": [
3192        "low",
3193        "medium",
3194        "high"
3195      ],
3196      "default_reasoning_effort": "medium",
3197      "supports_attachments": true,
3198      "options": {}
3199    },
3200    {
3201      "id": "x-ai/grok-4.1-fast",
3202      "name": "xAI: Grok 4.1 Fast",
3203      "cost_per_1m_in": 0.19999999999999998,
3204      "cost_per_1m_out": 0.5,
3205      "cost_per_1m_in_cached": 0,
3206      "cost_per_1m_out_cached": 0.049999999999999996,
3207      "context_window": 2000000,
3208      "default_max_tokens": 15000,
3209      "can_reason": true,
3210      "reasoning_levels": [
3211        "low",
3212        "medium",
3213        "high"
3214      ],
3215      "default_reasoning_effort": "medium",
3216      "supports_attachments": true,
3217      "options": {}
3218    },
3219    {
3220      "id": "x-ai/grok-code-fast-1",
3221      "name": "xAI: Grok Code Fast 1",
3222      "cost_per_1m_in": 0.19999999999999998,
3223      "cost_per_1m_out": 1.5,
3224      "cost_per_1m_in_cached": 0,
3225      "cost_per_1m_out_cached": 0.02,
3226      "context_window": 256000,
3227      "default_max_tokens": 5000,
3228      "can_reason": true,
3229      "reasoning_levels": [
3230        "low",
3231        "medium",
3232        "high"
3233      ],
3234      "default_reasoning_effort": "medium",
3235      "supports_attachments": false,
3236      "options": {}
3237    }
3238  ],
3239  "default_headers": {
3240    "HTTP-Referer": "https://charm.land",
3241    "X-Title": "Crush"
3242  }
3243}
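
A minimal sketch, for reference, of how a client could consume the catalog above. It assumes the listing is saved as openrouter.json in the working directory; the Go type and field names (Provider, Model, and so on) are illustrative assumptions rather than any tool's actual loader, and only the JSON keys come from the file itself.

load_openrouter.go (illustrative sketch)

// Parse the provider catalog and report pricing for its default large model.
// Everything below is an assumption-labelled example; only the JSON keys are taken
// from openrouter.json itself.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"
)

// Model mirrors one entry of the "models" array. Fields not needed for this
// example (e.g. "options", cached prices) are left out and ignored by Unmarshal.
type Model struct {
	ID                     string   `json:"id"`
	Name                   string   `json:"name"`
	CostPer1MIn            float64  `json:"cost_per_1m_in"`
	CostPer1MOut           float64  `json:"cost_per_1m_out"`
	ContextWindow          int      `json:"context_window"`
	DefaultMaxTokens       int      `json:"default_max_tokens"`
	CanReason              bool     `json:"can_reason"`
	ReasoningLevels        []string `json:"reasoning_levels,omitempty"`
	DefaultReasoningEffort string   `json:"default_reasoning_effort,omitempty"`
	SupportsAttachments    bool     `json:"supports_attachments"`
}

// Provider mirrors the top-level object of openrouter.json.
type Provider struct {
	Name                string            `json:"name"`
	ID                  string            `json:"id"`
	APIKey              string            `json:"api_key"`
	APIEndpoint         string            `json:"api_endpoint"`
	Type                string            `json:"type"`
	DefaultLargeModelID string            `json:"default_large_model_id"`
	DefaultSmallModelID string            `json:"default_small_model_id"`
	Models              []Model           `json:"models"`
	DefaultHeaders      map[string]string `json:"default_headers"`
}

func main() {
	raw, err := os.ReadFile("openrouter.json") // assumed location of the listing above
	if err != nil {
		log.Fatal(err)
	}
	var p Provider
	if err := json.Unmarshal(raw, &p); err != nil {
		log.Fatal(err)
	}
	// Find the provider's default large model and print its per-million-token pricing.
	for _, m := range p.Models {
		if m.ID == p.DefaultLargeModelID {
			fmt.Printf("%s: $%.2f/M in, $%.2f/M out, %d-token context window\n",
				m.Name, m.CostPer1MIn, m.CostPer1MOut, m.ContextWindow)
			return
		}
	}
	log.Fatalf("default model %q not found in catalog", p.DefaultLargeModelID)
}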