openrouter.json

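Each entry in the catalog below prices a model per million tokens (cost_per_1m_in / cost_per_1m_out, plus optional cached rates) and records its context window, default max tokens, and reasoning/attachment support. As a rough illustration of how such a catalog might be consumed, here is a minimal sketch; the estimate_cost helper and its use of Python are assumptions for illustration only and are not part of the file:

    import json

    def estimate_cost(catalog_path, model_id, input_tokens, output_tokens):
        # Hypothetical helper (not defined by openrouter.json): look up a model
        # entry by id and price one request from its per-million-token rates.
        with open(catalog_path) as f:
            catalog = json.load(f)
        model = next(m for m in catalog["models"] if m["id"] == model_id)
        return (input_tokens * model["cost_per_1m_in"]
                + output_tokens * model["cost_per_1m_out"]) / 1_000_000

    # e.g. 25k input + 2k output tokens on the default large model:
    # (25000 * 3 + 2000 * 15) / 1e6 = $0.105
    print(estimate_cost("openrouter.json", "anthropic/claude-sonnet-4", 25_000, 2_000))
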
   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false,
  21      "options": {}
  22    },
  23    {
  24      "id": "ai21/jamba-mini-1.7",
  25      "name": "AI21: Jamba Mini 1.7",
  26      "cost_per_1m_in": 0.2,
  27      "cost_per_1m_out": 0.4,
  28      "cost_per_1m_in_cached": 0,
  29      "cost_per_1m_out_cached": 0,
  30      "context_window": 256000,
  31      "default_max_tokens": 2048,
  32      "can_reason": false,
  33      "supports_attachments": false,
  34      "options": {}
  35    },
  36    {
  37      "id": "allenai/olmo-3-7b-instruct",
  38      "name": "AllenAI: Olmo 3 7B Instruct",
  39      "cost_per_1m_in": 0.1,
  40      "cost_per_1m_out": 0.2,
  41      "cost_per_1m_in_cached": 0,
  42      "cost_per_1m_out_cached": 0,
  43      "context_window": 65536,
  44      "default_max_tokens": 32768,
  45      "can_reason": false,
  46      "supports_attachments": false,
  47      "options": {}
  48    },
  49    {
  50      "id": "amazon/nova-lite-v1",
  51      "name": "Amazon: Nova Lite 1.0",
  52      "cost_per_1m_in": 0.06,
  53      "cost_per_1m_out": 0.24,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 300000,
  57      "default_max_tokens": 2560,
  58      "can_reason": false,
  59      "supports_attachments": true,
  60      "options": {}
  61    },
  62    {
  63      "id": "amazon/nova-micro-v1",
  64      "name": "Amazon: Nova Micro 1.0",
  65      "cost_per_1m_in": 0.035,
  66      "cost_per_1m_out": 0.14,
  67      "cost_per_1m_in_cached": 0,
  68      "cost_per_1m_out_cached": 0,
  69      "context_window": 128000,
  70      "default_max_tokens": 2560,
  71      "can_reason": false,
  72      "supports_attachments": false,
  73      "options": {}
  74    },
  75    {
  76      "id": "amazon/nova-premier-v1",
  77      "name": "Amazon: Nova Premier 1.0",
  78      "cost_per_1m_in": 2.5,
  79      "cost_per_1m_out": 12.5,
  80      "cost_per_1m_in_cached": 0,
  81      "cost_per_1m_out_cached": 0.625,
  82      "context_window": 1000000,
  83      "default_max_tokens": 16000,
  84      "can_reason": false,
  85      "supports_attachments": true,
  86      "options": {}
  87    },
  88    {
  89      "id": "amazon/nova-pro-v1",
  90      "name": "Amazon: Nova Pro 1.0",
  91      "cost_per_1m_in": 0.8,
  92      "cost_per_1m_out": 3.2,
  93      "cost_per_1m_in_cached": 0,
  94      "cost_per_1m_out_cached": 0,
  95      "context_window": 300000,
  96      "default_max_tokens": 2560,
  97      "can_reason": false,
  98      "supports_attachments": true,
  99      "options": {}
 100    },
 101    {
 102      "id": "anthropic/claude-3-haiku",
 103      "name": "Anthropic: Claude 3 Haiku",
 104      "cost_per_1m_in": 0.25,
 105      "cost_per_1m_out": 1.25,
 106      "cost_per_1m_in_cached": 0.3,
 107      "cost_per_1m_out_cached": 0.03,
 108      "context_window": 200000,
 109      "default_max_tokens": 2048,
 110      "can_reason": false,
 111      "supports_attachments": true,
 112      "options": {}
 113    },
 114    {
 115      "id": "anthropic/claude-3-opus",
 116      "name": "Anthropic: Claude 3 Opus",
 117      "cost_per_1m_in": 15,
 118      "cost_per_1m_out": 75,
 119      "cost_per_1m_in_cached": 18.75,
 120      "cost_per_1m_out_cached": 1.5,
 121      "context_window": 200000,
 122      "default_max_tokens": 2048,
 123      "can_reason": false,
 124      "supports_attachments": true,
 125      "options": {}
 126    },
 127    {
 128      "id": "anthropic/claude-3.5-haiku",
 129      "name": "Anthropic: Claude 3.5 Haiku",
 130      "cost_per_1m_in": 0.8,
 131      "cost_per_1m_out": 4,
 132      "cost_per_1m_in_cached": 1,
 133      "cost_per_1m_out_cached": 0.08,
 134      "context_window": 200000,
 135      "default_max_tokens": 4096,
 136      "can_reason": false,
 137      "supports_attachments": true,
 138      "options": {}
 139    },
 140    {
 141      "id": "anthropic/claude-3.5-haiku-20241022",
 142      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
 143      "cost_per_1m_in": 0.8,
 144      "cost_per_1m_out": 4,
 145      "cost_per_1m_in_cached": 1,
 146      "cost_per_1m_out_cached": 0.08,
 147      "context_window": 200000,
 148      "default_max_tokens": 4096,
 149      "can_reason": false,
 150      "supports_attachments": true,
 151      "options": {}
 152    },
 153    {
 154      "id": "anthropic/claude-3.5-sonnet",
 155      "name": "Anthropic: Claude 3.5 Sonnet",
 156      "cost_per_1m_in": 6,
 157      "cost_per_1m_out": 30,
 158      "cost_per_1m_in_cached": 7.5,
 159      "cost_per_1m_out_cached": 0.6,
 160      "context_window": 200000,
 161      "default_max_tokens": 4096,
 162      "can_reason": false,
 163      "supports_attachments": true,
 164      "options": {}
 165    },
 166    {
 167      "id": "anthropic/claude-3.7-sonnet",
 168      "name": "Anthropic: Claude 3.7 Sonnet",
 169      "cost_per_1m_in": 3,
 170      "cost_per_1m_out": 15,
 171      "cost_per_1m_in_cached": 3.75,
 172      "cost_per_1m_out_cached": 0.3,
 173      "context_window": 200000,
 174      "default_max_tokens": 32000,
 175      "can_reason": true,
 176      "reasoning_levels": [
 177        "low",
 178        "medium",
 179        "high"
 180      ],
 181      "default_reasoning_effort": "medium",
 182      "supports_attachments": true,
 183      "options": {}
 184    },
 185    {
 186      "id": "anthropic/claude-3.7-sonnet:thinking",
 187      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 188      "cost_per_1m_in": 3,
 189      "cost_per_1m_out": 15,
 190      "cost_per_1m_in_cached": 3.75,
 191      "cost_per_1m_out_cached": 0.3,
 192      "context_window": 200000,
 193      "default_max_tokens": 32000,
 194      "can_reason": true,
 195      "reasoning_levels": [
 196        "low",
 197        "medium",
 198        "high"
 199      ],
 200      "default_reasoning_effort": "medium",
 201      "supports_attachments": true,
 202      "options": {}
 203    },
 204    {
 205      "id": "anthropic/claude-haiku-4.5",
 206      "name": "Anthropic: Claude Haiku 4.5",
 207      "cost_per_1m_in": 1,
 208      "cost_per_1m_out": 5,
 209      "cost_per_1m_in_cached": 1.25,
 210      "cost_per_1m_out_cached": 0.1,
 211      "context_window": 200000,
 212      "default_max_tokens": 32000,
 213      "can_reason": true,
 214      "reasoning_levels": [
 215        "low",
 216        "medium",
 217        "high"
 218      ],
 219      "default_reasoning_effort": "medium",
 220      "supports_attachments": true,
 221      "options": {}
 222    },
 223    {
 224      "id": "anthropic/claude-opus-4",
 225      "name": "Anthropic: Claude Opus 4",
 226      "cost_per_1m_in": 15,
 227      "cost_per_1m_out": 75,
 228      "cost_per_1m_in_cached": 18.75,
 229      "cost_per_1m_out_cached": 1.5,
 230      "context_window": 200000,
 231      "default_max_tokens": 16000,
 232      "can_reason": true,
 233      "reasoning_levels": [
 234        "low",
 235        "medium",
 236        "high"
 237      ],
 238      "default_reasoning_effort": "medium",
 239      "supports_attachments": true,
 240      "options": {}
 241    },
 242    {
 243      "id": "anthropic/claude-opus-4.1",
 244      "name": "Anthropic: Claude Opus 4.1",
 245      "cost_per_1m_in": 15,
 246      "cost_per_1m_out": 75,
 247      "cost_per_1m_in_cached": 18.75,
 248      "cost_per_1m_out_cached": 1.5,
 249      "context_window": 200000,
 250      "default_max_tokens": 16000,
 251      "can_reason": true,
 252      "reasoning_levels": [
 253        "low",
 254        "medium",
 255        "high"
 256      ],
 257      "default_reasoning_effort": "medium",
 258      "supports_attachments": true,
 259      "options": {}
 260    },
 261    {
 262      "id": "anthropic/claude-opus-4.5",
 263      "name": "Anthropic: Claude Opus 4.5",
 264      "cost_per_1m_in": 5,
 265      "cost_per_1m_out": 25,
 266      "cost_per_1m_in_cached": 6.25,
 267      "cost_per_1m_out_cached": 0.5,
 268      "context_window": 200000,
 269      "default_max_tokens": 32000,
 270      "can_reason": true,
 271      "reasoning_levels": [
 272        "low",
 273        "medium",
 274        "high"
 275      ],
 276      "default_reasoning_effort": "medium",
 277      "supports_attachments": true,
 278      "options": {}
 279    },
 280    {
 281      "id": "anthropic/claude-sonnet-4",
 282      "name": "Anthropic: Claude Sonnet 4",
 283      "cost_per_1m_in": 3,
 284      "cost_per_1m_out": 15,
 285      "cost_per_1m_in_cached": 3.75,
 286      "cost_per_1m_out_cached": 0.3,
 287      "context_window": 1000000,
 288      "default_max_tokens": 32000,
 289      "can_reason": true,
 290      "reasoning_levels": [
 291        "low",
 292        "medium",
 293        "high"
 294      ],
 295      "default_reasoning_effort": "medium",
 296      "supports_attachments": true,
 297      "options": {}
 298    },
 299    {
 300      "id": "anthropic/claude-sonnet-4.5",
 301      "name": "Anthropic: Claude Sonnet 4.5",
 302      "cost_per_1m_in": 3,
 303      "cost_per_1m_out": 15,
 304      "cost_per_1m_in_cached": 3.75,
 305      "cost_per_1m_out_cached": 0.3,
 306      "context_window": 1000000,
 307      "default_max_tokens": 32000,
 308      "can_reason": true,
 309      "reasoning_levels": [
 310        "low",
 311        "medium",
 312        "high"
 313      ],
 314      "default_reasoning_effort": "medium",
 315      "supports_attachments": true,
 316      "options": {}
 317    },
 318    {
 319      "id": "arcee-ai/trinity-mini",
 320      "name": "Arcee AI: Trinity Mini",
 321      "cost_per_1m_in": 0.045,
 322      "cost_per_1m_out": 0.15,
 323      "cost_per_1m_in_cached": 0,
 324      "cost_per_1m_out_cached": 0,
 325      "context_window": 128000,
 326      "default_max_tokens": 12800,
 327      "can_reason": true,
 328      "reasoning_levels": [
 329        "low",
 330        "medium",
 331        "high"
 332      ],
 333      "default_reasoning_effort": "medium",
 334      "supports_attachments": false,
 335      "options": {}
 336    },
 337    {
 338      "id": "arcee-ai/trinity-mini:free",
 339      "name": "Arcee AI: Trinity Mini (free)",
 340      "cost_per_1m_in": 0,
 341      "cost_per_1m_out": 0,
 342      "cost_per_1m_in_cached": 0,
 343      "cost_per_1m_out_cached": 0,
 344      "context_window": 131072,
 345      "default_max_tokens": 13107,
 346      "can_reason": true,
 347      "reasoning_levels": [
 348        "low",
 349        "medium",
 350        "high"
 351      ],
 352      "default_reasoning_effort": "medium",
 353      "supports_attachments": false,
 354      "options": {}
 355    },
 356    {
 357      "id": "arcee-ai/virtuoso-large",
 358      "name": "Arcee AI: Virtuoso Large",
 359      "cost_per_1m_in": 0.75,
 360      "cost_per_1m_out": 1.2,
 361      "cost_per_1m_in_cached": 0,
 362      "cost_per_1m_out_cached": 0,
 363      "context_window": 131072,
 364      "default_max_tokens": 32000,
 365      "can_reason": false,
 366      "supports_attachments": false,
 367      "options": {}
 368    },
 369    {
 370      "id": "baidu/ernie-4.5-21b-a3b",
 371      "name": "Baidu: ERNIE 4.5 21B A3B",
 372      "cost_per_1m_in": 0.056,
 373      "cost_per_1m_out": 0.224,
 374      "cost_per_1m_in_cached": 0,
 375      "cost_per_1m_out_cached": 0,
 376      "context_window": 120000,
 377      "default_max_tokens": 4000,
 378      "can_reason": false,
 379      "supports_attachments": false,
 380      "options": {}
 381    },
 382    {
 383      "id": "baidu/ernie-4.5-vl-28b-a3b",
 384      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 385      "cost_per_1m_in": 0.112,
 386      "cost_per_1m_out": 0.448,
 387      "cost_per_1m_in_cached": 0,
 388      "cost_per_1m_out_cached": 0,
 389      "context_window": 30000,
 390      "default_max_tokens": 4000,
 391      "can_reason": true,
 392      "reasoning_levels": [
 393        "low",
 394        "medium",
 395        "high"
 396      ],
 397      "default_reasoning_effort": "medium",
 398      "supports_attachments": true,
 399      "options": {}
 400    },
 401    {
 402      "id": "openrouter/bert-nebulon-alpha",
 403      "name": "Bert-Nebulon Alpha",
 404      "cost_per_1m_in": 0,
 405      "cost_per_1m_out": 0,
 406      "cost_per_1m_in_cached": 0,
 407      "cost_per_1m_out_cached": 0,
 408      "context_window": 256000,
 409      "default_max_tokens": 25600,
 410      "can_reason": false,
 411      "supports_attachments": true,
 412      "options": {}
 413    },
 414    {
 415      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
 416      "name": "Cogito V2 Preview Llama 109B",
 417      "cost_per_1m_in": 0.18,
 418      "cost_per_1m_out": 0.59,
 419      "cost_per_1m_in_cached": 0,
 420      "cost_per_1m_out_cached": 0,
 421      "context_window": 32767,
 422      "default_max_tokens": 3276,
 423      "can_reason": true,
 424      "reasoning_levels": [
 425        "low",
 426        "medium",
 427        "high"
 428      ],
 429      "default_reasoning_effort": "medium",
 430      "supports_attachments": true,
 431      "options": {}
 432    },
 433    {
 434      "id": "cohere/command-r-08-2024",
 435      "name": "Cohere: Command R (08-2024)",
 436      "cost_per_1m_in": 0.15,
 437      "cost_per_1m_out": 0.6,
 438      "cost_per_1m_in_cached": 0,
 439      "cost_per_1m_out_cached": 0,
 440      "context_window": 128000,
 441      "default_max_tokens": 2000,
 442      "can_reason": false,
 443      "supports_attachments": false,
 444      "options": {}
 445    },
 446    {
 447      "id": "cohere/command-r-plus-08-2024",
 448      "name": "Cohere: Command R+ (08-2024)",
 449      "cost_per_1m_in": 2.5,
 450      "cost_per_1m_out": 10,
 451      "cost_per_1m_in_cached": 0,
 452      "cost_per_1m_out_cached": 0,
 453      "context_window": 128000,
 454      "default_max_tokens": 2000,
 455      "can_reason": false,
 456      "supports_attachments": false,
 457      "options": {}
 458    },
 459    {
 460      "id": "deepcogito/cogito-v2-preview-llama-405b",
 461      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
 462      "cost_per_1m_in": 3.5,
 463      "cost_per_1m_out": 3.5,
 464      "cost_per_1m_in_cached": 0,
 465      "cost_per_1m_out_cached": 0,
 466      "context_window": 32768,
 467      "default_max_tokens": 3276,
 468      "can_reason": true,
 469      "reasoning_levels": [
 470        "low",
 471        "medium",
 472        "high"
 473      ],
 474      "default_reasoning_effort": "medium",
 475      "supports_attachments": false,
 476      "options": {}
 477    },
 478    {
 479      "id": "deepcogito/cogito-v2-preview-llama-70b",
 480      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
 481      "cost_per_1m_in": 0.88,
 482      "cost_per_1m_out": 0.88,
 483      "cost_per_1m_in_cached": 0,
 484      "cost_per_1m_out_cached": 0,
 485      "context_window": 32768,
 486      "default_max_tokens": 3276,
 487      "can_reason": true,
 488      "reasoning_levels": [
 489        "low",
 490        "medium",
 491        "high"
 492      ],
 493      "default_reasoning_effort": "medium",
 494      "supports_attachments": false,
 495      "options": {}
 496    },
 497    {
 498      "id": "deepseek/deepseek-chat",
 499      "name": "DeepSeek: DeepSeek V3",
 500      "cost_per_1m_in": 0.32,
 501      "cost_per_1m_out": 1.04,
 502      "cost_per_1m_in_cached": 0,
 503      "cost_per_1m_out_cached": 0,
 504      "context_window": 64000,
 505      "default_max_tokens": 8000,
 506      "can_reason": false,
 507      "supports_attachments": false,
 508      "options": {}
 509    },
 510    {
 511      "id": "deepseek/deepseek-chat-v3-0324",
 512      "name": "DeepSeek: DeepSeek V3 0324",
 513      "cost_per_1m_in": 0.77,
 514      "cost_per_1m_out": 0.77,
 515      "cost_per_1m_in_cached": 0,
 516      "cost_per_1m_out_cached": 0,
 517      "context_window": 163840,
 518      "default_max_tokens": 65536,
 519      "can_reason": true,
 520      "reasoning_levels": [
 521        "low",
 522        "medium",
 523        "high"
 524      ],
 525      "default_reasoning_effort": "medium",
 526      "supports_attachments": false,
 527      "options": {}
 528    },
 529    {
 530      "id": "deepseek/deepseek-chat-v3.1",
 531      "name": "DeepSeek: DeepSeek V3.1",
 532      "cost_per_1m_in": 0.21,
 533      "cost_per_1m_out": 0.79,
 534      "cost_per_1m_in_cached": 0,
 535      "cost_per_1m_out_cached": 0.168,
 536      "context_window": 163840,
 537      "default_max_tokens": 16384,
 538      "can_reason": true,
 539      "reasoning_levels": [
 540        "low",
 541        "medium",
 542        "high"
 543      ],
 544      "default_reasoning_effort": "medium",
 545      "supports_attachments": false,
 546      "options": {}
 547    },
 548    {
 549      "id": "deepseek/deepseek-v3.1-terminus",
 550      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 551      "cost_per_1m_in": 0.21,
 552      "cost_per_1m_out": 0.79,
 553      "cost_per_1m_in_cached": 0,
 554      "cost_per_1m_out_cached": 0.168,
 555      "context_window": 163840,
 556      "default_max_tokens": 16384,
 557      "can_reason": true,
 558      "reasoning_levels": [
 559        "low",
 560        "medium",
 561        "high"
 562      ],
 563      "default_reasoning_effort": "medium",
 564      "supports_attachments": false,
 565      "options": {}
 566    },
 567    {
 568      "id": "deepseek/deepseek-v3.1-terminus:exacto",
 569      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
 570      "cost_per_1m_in": 0.27,
 571      "cost_per_1m_out": 1,
 572      "cost_per_1m_in_cached": 0,
 573      "cost_per_1m_out_cached": 0,
 574      "context_window": 163840,
 575      "default_max_tokens": 16384,
 576      "can_reason": true,
 577      "reasoning_levels": [
 578        "low",
 579        "medium",
 580        "high"
 581      ],
 582      "default_reasoning_effort": "medium",
 583      "supports_attachments": false,
 584      "options": {}
 585    },
 586    {
 587      "id": "deepseek/deepseek-v3.2",
 588      "name": "DeepSeek: DeepSeek V3.2",
 589      "cost_per_1m_in": 0.28,
 590      "cost_per_1m_out": 0.4,
 591      "cost_per_1m_in_cached": 0,
 592      "cost_per_1m_out_cached": 0,
 593      "context_window": 163840,
 594      "default_max_tokens": 32768,
 595      "can_reason": true,
 596      "reasoning_levels": [
 597        "low",
 598        "medium",
 599        "high"
 600      ],
 601      "default_reasoning_effort": "medium",
 602      "supports_attachments": false,
 603      "options": {}
 604    },
 605    {
 606      "id": "deepseek/deepseek-v3.2-exp",
 607      "name": "DeepSeek: DeepSeek V3.2 Exp",
 608      "cost_per_1m_in": 0.28,
 609      "cost_per_1m_out": 0.4,
 610      "cost_per_1m_in_cached": 0,
 611      "cost_per_1m_out_cached": 0,
 612      "context_window": 163840,
 613      "default_max_tokens": 32768,
 614      "can_reason": true,
 615      "reasoning_levels": [
 616        "low",
 617        "medium",
 618        "high"
 619      ],
 620      "default_reasoning_effort": "medium",
 621      "supports_attachments": false,
 622      "options": {}
 623    },
 624    {
 625      "id": "deepseek/deepseek-r1",
 626      "name": "DeepSeek: R1",
 627      "cost_per_1m_in": 0.7,
 628      "cost_per_1m_out": 2.4,
 629      "cost_per_1m_in_cached": 0,
 630      "cost_per_1m_out_cached": 0,
 631      "context_window": 163840,
 632      "default_max_tokens": 81920,
 633      "can_reason": true,
 634      "reasoning_levels": [
 635        "low",
 636        "medium",
 637        "high"
 638      ],
 639      "default_reasoning_effort": "medium",
 640      "supports_attachments": false,
 641      "options": {}
 642    },
 643    {
 644      "id": "deepseek/deepseek-r1-0528",
 645      "name": "DeepSeek: R1 0528",
 646      "cost_per_1m_in": 0.4,
 647      "cost_per_1m_out": 1.75,
 648      "cost_per_1m_in_cached": 0,
 649      "cost_per_1m_out_cached": 0,
 650      "context_window": 163840,
 651      "default_max_tokens": 81920,
 652      "can_reason": true,
 653      "reasoning_levels": [
 654        "low",
 655        "medium",
 656        "high"
 657      ],
 658      "default_reasoning_effort": "medium",
 659      "supports_attachments": false,
 660      "options": {}
 661    },
 662    {
 663      "id": "deepseek/deepseek-r1-distill-llama-70b",
 664      "name": "DeepSeek: R1 Distill Llama 70B",
 665      "cost_per_1m_in": 0.03,
 666      "cost_per_1m_out": 0.13,
 667      "cost_per_1m_in_cached": 0,
 668      "cost_per_1m_out_cached": 0,
 669      "context_window": 131072,
 670      "default_max_tokens": 65536,
 671      "can_reason": true,
 672      "reasoning_levels": [
 673        "low",
 674        "medium",
 675        "high"
 676      ],
 677      "default_reasoning_effort": "medium",
 678      "supports_attachments": false,
 679      "options": {}
 680    },
 681    {
 682      "id": "google/gemini-2.0-flash-001",
 683      "name": "Google: Gemini 2.0 Flash",
 684      "cost_per_1m_in": 0.1,
 685      "cost_per_1m_out": 0.4,
 686      "cost_per_1m_in_cached": 0.1833,
 687      "cost_per_1m_out_cached": 0.025,
 688      "context_window": 1048576,
 689      "default_max_tokens": 4096,
 690      "can_reason": false,
 691      "supports_attachments": true,
 692      "options": {}
 693    },
 694    {
 695      "id": "google/gemini-2.0-flash-exp:free",
 696      "name": "Google: Gemini 2.0 Flash Experimental (free)",
 697      "cost_per_1m_in": 0,
 698      "cost_per_1m_out": 0,
 699      "cost_per_1m_in_cached": 0,
 700      "cost_per_1m_out_cached": 0,
 701      "context_window": 1048576,
 702      "default_max_tokens": 4096,
 703      "can_reason": false,
 704      "supports_attachments": true,
 705      "options": {}
 706    },
 707    {
 708      "id": "google/gemini-2.0-flash-lite-001",
 709      "name": "Google: Gemini 2.0 Flash Lite",
 710      "cost_per_1m_in": 0.075,
 711      "cost_per_1m_out": 0.3,
 712      "cost_per_1m_in_cached": 0,
 713      "cost_per_1m_out_cached": 0,
 714      "context_window": 1048576,
 715      "default_max_tokens": 4096,
 716      "can_reason": false,
 717      "supports_attachments": true,
 718      "options": {}
 719    },
 720    {
 721      "id": "google/gemini-2.5-flash",
 722      "name": "Google: Gemini 2.5 Flash",
 723      "cost_per_1m_in": 0.3,
 724      "cost_per_1m_out": 2.5,
 725      "cost_per_1m_in_cached": 0.3833,
 726      "cost_per_1m_out_cached": 0.03,
 727      "context_window": 1048576,
 728      "default_max_tokens": 32767,
 729      "can_reason": true,
 730      "reasoning_levels": [
 731        "low",
 732        "medium",
 733        "high"
 734      ],
 735      "default_reasoning_effort": "medium",
 736      "supports_attachments": true,
 737      "options": {}
 738    },
 739    {
 740      "id": "google/gemini-2.5-flash-lite",
 741      "name": "Google: Gemini 2.5 Flash Lite",
 742      "cost_per_1m_in": 0.1,
 743      "cost_per_1m_out": 0.4,
 744      "cost_per_1m_in_cached": 0.1833,
 745      "cost_per_1m_out_cached": 0.025,
 746      "context_window": 1048576,
 747      "default_max_tokens": 32767,
 748      "can_reason": true,
 749      "reasoning_levels": [
 750        "low",
 751        "medium",
 752        "high"
 753      ],
 754      "default_reasoning_effort": "medium",
 755      "supports_attachments": true,
 756      "options": {}
 757    },
 758    {
 759      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 760      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 761      "cost_per_1m_in": 0.1,
 762      "cost_per_1m_out": 0.4,
 763      "cost_per_1m_in_cached": 0,
 764      "cost_per_1m_out_cached": 0,
 765      "context_window": 1048576,
 766      "default_max_tokens": 32768,
 767      "can_reason": true,
 768      "reasoning_levels": [
 769        "low",
 770        "medium",
 771        "high"
 772      ],
 773      "default_reasoning_effort": "medium",
 774      "supports_attachments": true,
 775      "options": {}
 776    },
 777    {
 778      "id": "google/gemini-2.5-flash-preview-09-2025",
 779      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
 780      "cost_per_1m_in": 0.3,
 781      "cost_per_1m_out": 2.5,
 782      "cost_per_1m_in_cached": 0.3833,
 783      "cost_per_1m_out_cached": 0.075,
 784      "context_window": 1048576,
 785      "default_max_tokens": 32768,
 786      "can_reason": true,
 787      "reasoning_levels": [
 788        "low",
 789        "medium",
 790        "high"
 791      ],
 792      "default_reasoning_effort": "medium",
 793      "supports_attachments": true,
 794      "options": {}
 795    },
 796    {
 797      "id": "google/gemini-2.5-pro",
 798      "name": "Google: Gemini 2.5 Pro",
 799      "cost_per_1m_in": 1.25,
 800      "cost_per_1m_out": 10,
 801      "cost_per_1m_in_cached": 1.625,
 802      "cost_per_1m_out_cached": 0.125,
 803      "context_window": 1048576,
 804      "default_max_tokens": 32768,
 805      "can_reason": true,
 806      "reasoning_levels": [
 807        "low",
 808        "medium",
 809        "high"
 810      ],
 811      "default_reasoning_effort": "medium",
 812      "supports_attachments": true,
 813      "options": {}
 814    },
 815    {
 816      "id": "google/gemini-2.5-pro-preview-05-06",
 817      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 818      "cost_per_1m_in": 1.25,
 819      "cost_per_1m_out": 10,
 820      "cost_per_1m_in_cached": 1.625,
 821      "cost_per_1m_out_cached": 0.125,
 822      "context_window": 1048576,
 823      "default_max_tokens": 32768,
 824      "can_reason": true,
 825      "reasoning_levels": [
 826        "low",
 827        "medium",
 828        "high"
 829      ],
 830      "default_reasoning_effort": "medium",
 831      "supports_attachments": true,
 832      "options": {}
 833    },
 834    {
 835      "id": "google/gemini-2.5-pro-preview",
 836      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 837      "cost_per_1m_in": 1.25,
 838      "cost_per_1m_out": 10,
 839      "cost_per_1m_in_cached": 1.625,
 840      "cost_per_1m_out_cached": 0.125,
 841      "context_window": 1048576,
 842      "default_max_tokens": 32768,
 843      "can_reason": true,
 844      "reasoning_levels": [
 845        "low",
 846        "medium",
 847        "high"
 848      ],
 849      "default_reasoning_effort": "medium",
 850      "supports_attachments": true,
 851      "options": {}
 852    },
 853    {
 854      "id": "google/gemini-3-pro-preview",
 855      "name": "Google: Gemini 3 Pro Preview",
 856      "cost_per_1m_in": 2,
 857      "cost_per_1m_out": 12,
 858      "cost_per_1m_in_cached": 2.375,
 859      "cost_per_1m_out_cached": 0.2,
 860      "context_window": 1048576,
 861      "default_max_tokens": 32768,
 862      "can_reason": true,
 863      "reasoning_levels": [
 864        "low",
 865        "medium",
 866        "high"
 867      ],
 868      "default_reasoning_effort": "medium",
 869      "supports_attachments": true,
 870      "options": {}
 871    },
 872    {
 873      "id": "inception/mercury",
 874      "name": "Inception: Mercury",
 875      "cost_per_1m_in": 0.25,
 876      "cost_per_1m_out": 1,
 877      "cost_per_1m_in_cached": 0,
 878      "cost_per_1m_out_cached": 0,
 879      "context_window": 128000,
 880      "default_max_tokens": 8192,
 881      "can_reason": false,
 882      "supports_attachments": false,
 883      "options": {}
 884    },
 885    {
 886      "id": "inception/mercury-coder",
 887      "name": "Inception: Mercury Coder",
 888      "cost_per_1m_in": 0.25,
 889      "cost_per_1m_out": 1,
 890      "cost_per_1m_in_cached": 0,
 891      "cost_per_1m_out_cached": 0,
 892      "context_window": 128000,
 893      "default_max_tokens": 8192,
 894      "can_reason": false,
 895      "supports_attachments": false,
 896      "options": {}
 897    },
 898    {
 899      "id": "kwaipilot/kat-coder-pro:free",
 900      "name": "Kwaipilot: KAT-Coder-Pro V1 (free)",
 901      "cost_per_1m_in": 0,
 902      "cost_per_1m_out": 0,
 903      "cost_per_1m_in_cached": 0,
 904      "cost_per_1m_out_cached": 0,
 905      "context_window": 256000,
 906      "default_max_tokens": 16384,
 907      "can_reason": false,
 908      "supports_attachments": false,
 909      "options": {}
 910    },
 911    {
 912      "id": "meituan/longcat-flash-chat:free",
 913      "name": "Meituan: LongCat Flash Chat (free)",
 914      "cost_per_1m_in": 0,
 915      "cost_per_1m_out": 0,
 916      "cost_per_1m_in_cached": 0,
 917      "cost_per_1m_out_cached": 0,
 918      "context_window": 131072,
 919      "default_max_tokens": 65536,
 920      "can_reason": false,
 921      "supports_attachments": false,
 922      "options": {}
 923    },
 924    {
 925      "id": "meta-llama/llama-3.1-405b-instruct",
 926      "name": "Meta: Llama 3.1 405B Instruct",
 927      "cost_per_1m_in": 3.5,
 928      "cost_per_1m_out": 3.5,
 929      "cost_per_1m_in_cached": 0,
 930      "cost_per_1m_out_cached": 0,
 931      "context_window": 130815,
 932      "default_max_tokens": 13081,
 933      "can_reason": false,
 934      "supports_attachments": false,
 935      "options": {}
 936    },
 937    {
 938      "id": "meta-llama/llama-3.1-70b-instruct",
 939      "name": "Meta: Llama 3.1 70B Instruct",
 940      "cost_per_1m_in": 0.88,
 941      "cost_per_1m_out": 0.88,
 942      "cost_per_1m_in_cached": 0,
 943      "cost_per_1m_out_cached": 0,
 944      "context_window": 131072,
 945      "default_max_tokens": 13107,
 946      "can_reason": false,
 947      "supports_attachments": false,
 948      "options": {}
 949    },
 950    {
 951      "id": "meta-llama/llama-3.1-8b-instruct",
 952      "name": "Meta: Llama 3.1 8B Instruct",
 953      "cost_per_1m_in": 0.02,
 954      "cost_per_1m_out": 0.03,
 955      "cost_per_1m_in_cached": 0,
 956      "cost_per_1m_out_cached": 0,
 957      "context_window": 131072,
 958      "default_max_tokens": 8192,
 959      "can_reason": false,
 960      "supports_attachments": false,
 961      "options": {}
 962    },
 963    {
 964      "id": "meta-llama/llama-3.2-3b-instruct",
 965      "name": "Meta: Llama 3.2 3B Instruct",
 966      "cost_per_1m_in": 0.024,
 967      "cost_per_1m_out": 0.04,
 968      "cost_per_1m_in_cached": 0,
 969      "cost_per_1m_out_cached": 0,
 970      "context_window": 32768,
 971      "default_max_tokens": 16000,
 972      "can_reason": false,
 973      "supports_attachments": false,
 974      "options": {}
 975    },
 976    {
 977      "id": "meta-llama/llama-3.3-70b-instruct",
 978      "name": "Meta: Llama 3.3 70B Instruct",
 979      "cost_per_1m_in": 0.13,
 980      "cost_per_1m_out": 0.38,
 981      "cost_per_1m_in_cached": 0,
 982      "cost_per_1m_out_cached": 0,
 983      "context_window": 131072,
 984      "default_max_tokens": 8192,
 985      "can_reason": false,
 986      "supports_attachments": false,
 987      "options": {}
 988    },
 989    {
 990      "id": "meta-llama/llama-3.3-70b-instruct:free",
 991      "name": "Meta: Llama 3.3 70B Instruct (free)",
 992      "cost_per_1m_in": 0,
 993      "cost_per_1m_out": 0,
 994      "cost_per_1m_in_cached": 0,
 995      "cost_per_1m_out_cached": 0,
 996      "context_window": 131072,
 997      "default_max_tokens": 13107,
 998      "can_reason": false,
 999      "supports_attachments": false,
1000      "options": {}
1001    },
1002    {
1003      "id": "meta-llama/llama-4-maverick",
1004      "name": "Meta: Llama 4 Maverick",
1005      "cost_per_1m_in": 0.27,
1006      "cost_per_1m_out": 0.85,
1007      "cost_per_1m_in_cached": 0,
1008      "cost_per_1m_out_cached": 0,
1009      "context_window": 1048576,
1010      "default_max_tokens": 104857,
1011      "can_reason": false,
1012      "supports_attachments": true,
1013      "options": {}
1014    },
1015    {
1016      "id": "meta-llama/llama-4-scout",
1017      "name": "Meta: Llama 4 Scout",
1018      "cost_per_1m_in": 0.25,
1019      "cost_per_1m_out": 0.7,
1020      "cost_per_1m_in_cached": 0,
1021      "cost_per_1m_out_cached": 0,
1022      "context_window": 1310720,
1023      "default_max_tokens": 4096,
1024      "can_reason": false,
1025      "supports_attachments": true,
1026      "options": {}
1027    },
1028    {
1029      "id": "microsoft/phi-3-medium-128k-instruct",
1030      "name": "Microsoft: Phi-3 Medium 128K Instruct",
1031      "cost_per_1m_in": 1,
1032      "cost_per_1m_out": 1,
1033      "cost_per_1m_in_cached": 0,
1034      "cost_per_1m_out_cached": 0,
1035      "context_window": 128000,
1036      "default_max_tokens": 12800,
1037      "can_reason": false,
1038      "supports_attachments": false,
1039      "options": {}
1040    },
1041    {
1042      "id": "microsoft/phi-3-mini-128k-instruct",
1043      "name": "Microsoft: Phi-3 Mini 128K Instruct",
1044      "cost_per_1m_in": 0.1,
1045      "cost_per_1m_out": 0.1,
1046      "cost_per_1m_in_cached": 0,
1047      "cost_per_1m_out_cached": 0,
1048      "context_window": 128000,
1049      "default_max_tokens": 12800,
1050      "can_reason": false,
1051      "supports_attachments": false,
1052      "options": {}
1053    },
1054    {
1055      "id": "microsoft/phi-3.5-mini-128k-instruct",
1056      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
1057      "cost_per_1m_in": 0.1,
1058      "cost_per_1m_out": 0.1,
1059      "cost_per_1m_in_cached": 0,
1060      "cost_per_1m_out_cached": 0,
1061      "context_window": 128000,
1062      "default_max_tokens": 12800,
1063      "can_reason": false,
1064      "supports_attachments": false,
1065      "options": {}
1066    },
1067    {
1068      "id": "minimax/minimax-m2",
1069      "name": "MiniMax: MiniMax M2",
1070      "cost_per_1m_in": 0.255,
1071      "cost_per_1m_out": 1.02,
1072      "cost_per_1m_in_cached": 0,
1073      "cost_per_1m_out_cached": 0,
1074      "context_window": 204800,
1075      "default_max_tokens": 65536,
1076      "can_reason": true,
1077      "reasoning_levels": [
1078        "low",
1079        "medium",
1080        "high"
1081      ],
1082      "default_reasoning_effort": "medium",
1083      "supports_attachments": false,
1084      "options": {}
1085    },
1086    {
1087      "id": "mistralai/mistral-large",
1088      "name": "Mistral Large",
1089      "cost_per_1m_in": 2,
1090      "cost_per_1m_out": 6,
1091      "cost_per_1m_in_cached": 0,
1092      "cost_per_1m_out_cached": 0,
1093      "context_window": 128000,
1094      "default_max_tokens": 12800,
1095      "can_reason": false,
1096      "supports_attachments": false,
1097      "options": {}
1098    },
1099    {
1100      "id": "mistralai/mistral-large-2407",
1101      "name": "Mistral Large 2407",
1102      "cost_per_1m_in": 2,
1103      "cost_per_1m_out": 6,
1104      "cost_per_1m_in_cached": 0,
1105      "cost_per_1m_out_cached": 0,
1106      "context_window": 131072,
1107      "default_max_tokens": 13107,
1108      "can_reason": false,
1109      "supports_attachments": false,
1110      "options": {}
1111    },
1112    {
1113      "id": "mistralai/mistral-large-2411",
1114      "name": "Mistral Large 2411",
1115      "cost_per_1m_in": 2,
1116      "cost_per_1m_out": 6,
1117      "cost_per_1m_in_cached": 0,
1118      "cost_per_1m_out_cached": 0,
1119      "context_window": 131072,
1120      "default_max_tokens": 13107,
1121      "can_reason": false,
1122      "supports_attachments": false,
1123      "options": {}
1124    },
1125    {
1126      "id": "mistralai/mistral-small",
1127      "name": "Mistral Small",
1128      "cost_per_1m_in": 0.2,
1129      "cost_per_1m_out": 0.6,
1130      "cost_per_1m_in_cached": 0,
1131      "cost_per_1m_out_cached": 0,
1132      "context_window": 32768,
1133      "default_max_tokens": 3276,
1134      "can_reason": false,
1135      "supports_attachments": false,
1136      "options": {}
1137    },
1138    {
1139      "id": "mistralai/mistral-tiny",
1140      "name": "Mistral Tiny",
1141      "cost_per_1m_in": 0.25,
1142      "cost_per_1m_out": 0.25,
1143      "cost_per_1m_in_cached": 0,
1144      "cost_per_1m_out_cached": 0,
1145      "context_window": 32768,
1146      "default_max_tokens": 3276,
1147      "can_reason": false,
1148      "supports_attachments": false,
1149      "options": {}
1150    },
1151    {
1152      "id": "mistralai/codestral-2501",
1153      "name": "Mistral: Codestral 2501",
1154      "cost_per_1m_in": 0.3,
1155      "cost_per_1m_out": 0.9,
1156      "cost_per_1m_in_cached": 0,
1157      "cost_per_1m_out_cached": 0,
1158      "context_window": 256000,
1159      "default_max_tokens": 25600,
1160      "can_reason": false,
1161      "supports_attachments": false,
1162      "options": {}
1163    },
1164    {
1165      "id": "mistralai/codestral-2508",
1166      "name": "Mistral: Codestral 2508",
1167      "cost_per_1m_in": 0.3,
1168      "cost_per_1m_out": 0.9,
1169      "cost_per_1m_in_cached": 0,
1170      "cost_per_1m_out_cached": 0,
1171      "context_window": 256000,
1172      "default_max_tokens": 25600,
1173      "can_reason": false,
1174      "supports_attachments": false,
1175      "options": {}
1176    },
1177    {
1178      "id": "mistralai/devstral-medium",
1179      "name": "Mistral: Devstral Medium",
1180      "cost_per_1m_in": 0.4,
1181      "cost_per_1m_out": 2,
1182      "cost_per_1m_in_cached": 0,
1183      "cost_per_1m_out_cached": 0,
1184      "context_window": 131072,
1185      "default_max_tokens": 13107,
1186      "can_reason": false,
1187      "supports_attachments": false,
1188      "options": {}
1189    },
1190    {
1191      "id": "mistralai/devstral-small",
1192      "name": "Mistral: Devstral Small 1.1",
1193      "cost_per_1m_in": 0.1,
1194      "cost_per_1m_out": 0.3,
1195      "cost_per_1m_in_cached": 0,
1196      "cost_per_1m_out_cached": 0,
1197      "context_window": 131072,
1198      "default_max_tokens": 13107,
1199      "can_reason": false,
1200      "supports_attachments": false,
1201      "options": {}
1202    },
1203    {
1204      "id": "mistralai/magistral-medium-2506",
1205      "name": "Mistral: Magistral Medium 2506",
1206      "cost_per_1m_in": 2,
1207      "cost_per_1m_out": 5,
1208      "cost_per_1m_in_cached": 0,
1209      "cost_per_1m_out_cached": 0,
1210      "context_window": 40960,
1211      "default_max_tokens": 20000,
1212      "can_reason": true,
1213      "reasoning_levels": [
1214        "low",
1215        "medium",
1216        "high"
1217      ],
1218      "default_reasoning_effort": "medium",
1219      "supports_attachments": false,
1220      "options": {}
1221    },
1222    {
1223      "id": "mistralai/magistral-medium-2506:thinking",
1224      "name": "Mistral: Magistral Medium 2506 (thinking)",
1225      "cost_per_1m_in": 2,
1226      "cost_per_1m_out": 5,
1227      "cost_per_1m_in_cached": 0,
1228      "cost_per_1m_out_cached": 0,
1229      "context_window": 40960,
1230      "default_max_tokens": 20000,
1231      "can_reason": true,
1232      "reasoning_levels": [
1233        "low",
1234        "medium",
1235        "high"
1236      ],
1237      "default_reasoning_effort": "medium",
1238      "supports_attachments": false,
1239      "options": {}
1240    },
1241    {
1242      "id": "mistralai/magistral-small-2506",
1243      "name": "Mistral: Magistral Small 2506",
1244      "cost_per_1m_in": 0.5,
1245      "cost_per_1m_out": 1.5,
1246      "cost_per_1m_in_cached": 0,
1247      "cost_per_1m_out_cached": 0,
1248      "context_window": 40000,
1249      "default_max_tokens": 20000,
1250      "can_reason": true,
1251      "reasoning_levels": [
1252        "low",
1253        "medium",
1254        "high"
1255      ],
1256      "default_reasoning_effort": "medium",
1257      "supports_attachments": false,
1258      "options": {}
1259    },
1260    {
1261      "id": "mistralai/ministral-3b",
1262      "name": "Mistral: Ministral 3B",
1263      "cost_per_1m_in": 0.04,
1264      "cost_per_1m_out": 0.04,
1265      "cost_per_1m_in_cached": 0,
1266      "cost_per_1m_out_cached": 0,
1267      "context_window": 131072,
1268      "default_max_tokens": 13107,
1269      "can_reason": false,
1270      "supports_attachments": false,
1271      "options": {}
1272    },
1273    {
1274      "id": "mistralai/ministral-8b",
1275      "name": "Mistral: Ministral 8B",
1276      "cost_per_1m_in": 0.1,
1277      "cost_per_1m_out": 0.1,
1278      "cost_per_1m_in_cached": 0,
1279      "cost_per_1m_out_cached": 0,
1280      "context_window": 131072,
1281      "default_max_tokens": 13107,
1282      "can_reason": false,
1283      "supports_attachments": false,
1284      "options": {}
1285    },
1286    {
1287      "id": "mistralai/mistral-7b-instruct",
1288      "name": "Mistral: Mistral 7B Instruct",
1289      "cost_per_1m_in": 0.028,
1290      "cost_per_1m_out": 0.054,
1291      "cost_per_1m_in_cached": 0,
1292      "cost_per_1m_out_cached": 0,
1293      "context_window": 32768,
1294      "default_max_tokens": 8192,
1295      "can_reason": false,
1296      "supports_attachments": false,
1297      "options": {}
1298    },
1299    {
1300      "id": "mistralai/mistral-7b-instruct:free",
1301      "name": "Mistral: Mistral 7B Instruct (free)",
1302      "cost_per_1m_in": 0,
1303      "cost_per_1m_out": 0,
1304      "cost_per_1m_in_cached": 0,
1305      "cost_per_1m_out_cached": 0,
1306      "context_window": 32768,
1307      "default_max_tokens": 8192,
1308      "can_reason": false,
1309      "supports_attachments": false,
1310      "options": {}
1311    },
1312    {
1313      "id": "mistralai/mistral-medium-3",
1314      "name": "Mistral: Mistral Medium 3",
1315      "cost_per_1m_in": 0.4,
1316      "cost_per_1m_out": 2,
1317      "cost_per_1m_in_cached": 0,
1318      "cost_per_1m_out_cached": 0,
1319      "context_window": 131072,
1320      "default_max_tokens": 13107,
1321      "can_reason": false,
1322      "supports_attachments": true,
1323      "options": {}
1324    },
1325    {
1326      "id": "mistralai/mistral-medium-3.1",
1327      "name": "Mistral: Mistral Medium 3.1",
1328      "cost_per_1m_in": 0.4,
1329      "cost_per_1m_out": 2,
1330      "cost_per_1m_in_cached": 0,
1331      "cost_per_1m_out_cached": 0,
1332      "context_window": 131072,
1333      "default_max_tokens": 13107,
1334      "can_reason": false,
1335      "supports_attachments": true,
1336      "options": {}
1337    },
1338    {
1339      "id": "mistralai/mistral-nemo",
1340      "name": "Mistral: Mistral Nemo",
1341      "cost_per_1m_in": 0.15,
1342      "cost_per_1m_out": 0.15,
1343      "cost_per_1m_in_cached": 0,
1344      "cost_per_1m_out_cached": 0,
1345      "context_window": 131072,
1346      "default_max_tokens": 13107,
1347      "can_reason": false,
1348      "supports_attachments": false,
1349      "options": {}
1350    },
1351    {
1352      "id": "mistralai/mistral-small-24b-instruct-2501",
1353      "name": "Mistral: Mistral Small 3",
1354      "cost_per_1m_in": 0.8,
1355      "cost_per_1m_out": 0.8,
1356      "cost_per_1m_in_cached": 0,
1357      "cost_per_1m_out_cached": 0,
1358      "context_window": 32768,
1359      "default_max_tokens": 1024,
1360      "can_reason": false,
1361      "supports_attachments": false,
1362      "options": {}
1363    },
1364    {
1365      "id": "mistralai/mistral-small-3.1-24b-instruct",
1366      "name": "Mistral: Mistral Small 3.1 24B",
1367      "cost_per_1m_in": 0.1,
1368      "cost_per_1m_out": 0.3,
1369      "cost_per_1m_in_cached": 0,
1370      "cost_per_1m_out_cached": 0,
1371      "context_window": 131072,
1372      "default_max_tokens": 13107,
1373      "can_reason": false,
1374      "supports_attachments": true,
1375      "options": {}
1376    },
1377    {
1378      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
1379      "name": "Mistral: Mistral Small 3.1 24B (free)",
1380      "cost_per_1m_in": 0,
1381      "cost_per_1m_out": 0,
1382      "cost_per_1m_in_cached": 0,
1383      "cost_per_1m_out_cached": 0,
1384      "context_window": 128000,
1385      "default_max_tokens": 12800,
1386      "can_reason": false,
1387      "supports_attachments": true,
1388      "options": {}
1389    },
1390    {
1391      "id": "mistralai/mistral-small-3.2-24b-instruct",
1392      "name": "Mistral: Mistral Small 3.2 24B",
1393      "cost_per_1m_in": 0.1,
1394      "cost_per_1m_out": 0.3,
1395      "cost_per_1m_in_cached": 0,
1396      "cost_per_1m_out_cached": 0,
1397      "context_window": 131072,
1398      "default_max_tokens": 13107,
1399      "can_reason": false,
1400      "supports_attachments": true,
1401      "options": {}
1402    },
1403    {
1404      "id": "mistralai/mixtral-8x22b-instruct",
1405      "name": "Mistral: Mixtral 8x22B Instruct",
1406      "cost_per_1m_in": 2,
1407      "cost_per_1m_out": 6,
1408      "cost_per_1m_in_cached": 0,
1409      "cost_per_1m_out_cached": 0,
1410      "context_window": 65536,
1411      "default_max_tokens": 6553,
1412      "can_reason": false,
1413      "supports_attachments": false,
1414      "options": {}
1415    },
1416    {
1417      "id": "mistralai/mixtral-8x7b-instruct",
1418      "name": "Mistral: Mixtral 8x7B Instruct",
1419      "cost_per_1m_in": 0.54,
1420      "cost_per_1m_out": 0.54,
1421      "cost_per_1m_in_cached": 0,
1422      "cost_per_1m_out_cached": 0,
1423      "context_window": 32768,
1424      "default_max_tokens": 8192,
1425      "can_reason": false,
1426      "supports_attachments": false,
1427      "options": {}
1428    },
1429    {
1430      "id": "mistralai/pixtral-large-2411",
1431      "name": "Mistral: Pixtral Large 2411",
1432      "cost_per_1m_in": 2,
1433      "cost_per_1m_out": 6,
1434      "cost_per_1m_in_cached": 0,
1435      "cost_per_1m_out_cached": 0,
1436      "context_window": 131072,
1437      "default_max_tokens": 13107,
1438      "can_reason": false,
1439      "supports_attachments": true,
1440      "options": {}
1441    },
1442    {
1443      "id": "mistralai/mistral-saba",
1444      "name": "Mistral: Saba",
1445      "cost_per_1m_in": 0.2,
1446      "cost_per_1m_out": 0.6,
1447      "cost_per_1m_in_cached": 0,
1448      "cost_per_1m_out_cached": 0,
1449      "context_window": 32768,
1450      "default_max_tokens": 3276,
1451      "can_reason": false,
1452      "supports_attachments": false,
1453      "options": {}
1454    },
1455    {
1456      "id": "mistralai/voxtral-small-24b-2507",
1457      "name": "Mistral: Voxtral Small 24B 2507",
1458      "cost_per_1m_in": 0.1,
1459      "cost_per_1m_out": 0.3,
1460      "cost_per_1m_in_cached": 0,
1461      "cost_per_1m_out_cached": 0,
1462      "context_window": 32000,
1463      "default_max_tokens": 3200,
1464      "can_reason": false,
1465      "supports_attachments": false,
1466      "options": {}
1467    },
1468    {
1469      "id": "moonshotai/kimi-k2",
1470      "name": "MoonshotAI: Kimi K2 0711",
1471      "cost_per_1m_in": 0.5,
1472      "cost_per_1m_out": 2.4,
1473      "cost_per_1m_in_cached": 0,
1474      "cost_per_1m_out_cached": 0,
1475      "context_window": 131072,
1476      "default_max_tokens": 13107,
1477      "can_reason": false,
1478      "supports_attachments": false,
1479      "options": {}
1480    },
1481    {
1482      "id": "moonshotai/kimi-k2-0905",
1483      "name": "MoonshotAI: Kimi K2 0905",
1484      "cost_per_1m_in": 0.39,
1485      "cost_per_1m_out": 1.9,
1486      "cost_per_1m_in_cached": 0,
1487      "cost_per_1m_out_cached": 0,
1488      "context_window": 262144,
1489      "default_max_tokens": 131072,
1490      "can_reason": false,
1491      "supports_attachments": false,
1492      "options": {}
1493    },
1494    {
1495      "id": "moonshotai/kimi-k2-0905:exacto",
1496      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
1497      "cost_per_1m_in": 0.6,
1498      "cost_per_1m_out": 2.5,
1499      "cost_per_1m_in_cached": 0,
1500      "cost_per_1m_out_cached": 0,
1501      "context_window": 262144,
1502      "default_max_tokens": 26214,
1503      "can_reason": false,
1504      "supports_attachments": false,
1505      "options": {}
1506    },
1507    {
1508      "id": "moonshotai/kimi-k2-thinking",
1509      "name": "MoonshotAI: Kimi K2 Thinking",
1510      "cost_per_1m_in": 0.55,
1511      "cost_per_1m_out": 2.5,
1512      "cost_per_1m_in_cached": 0,
1513      "cost_per_1m_out_cached": 0,
1514      "context_window": 262144,
1515      "default_max_tokens": 131072,
1516      "can_reason": true,
1517      "reasoning_levels": [
1518        "low",
1519        "medium",
1520        "high"
1521      ],
1522      "default_reasoning_effort": "medium",
1523      "supports_attachments": false,
1524      "options": {}
1525    },
1526    {
1527      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1528      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1529      "cost_per_1m_in": 1.2,
1530      "cost_per_1m_out": 1.2,
1531      "cost_per_1m_in_cached": 0,
1532      "cost_per_1m_out_cached": 0,
1533      "context_window": 131072,
1534      "default_max_tokens": 8192,
1535      "can_reason": false,
1536      "supports_attachments": false,
1537      "options": {}
1538    },
1539    {
1540      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1541      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1542      "cost_per_1m_in": 0.1,
1543      "cost_per_1m_out": 0.4,
1544      "cost_per_1m_in_cached": 0,
1545      "cost_per_1m_out_cached": 0,
1546      "context_window": 131072,
1547      "default_max_tokens": 13107,
1548      "can_reason": true,
1549      "reasoning_levels": [
1550        "low",
1551        "medium",
1552        "high"
1553      ],
1554      "default_reasoning_effort": "medium",
1555      "supports_attachments": false,
1556      "options": {}
1557    },
1558    {
1559      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1560      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1561      "cost_per_1m_in": 0,
1562      "cost_per_1m_out": 0,
1563      "cost_per_1m_in_cached": 0,
1564      "cost_per_1m_out_cached": 0,
1565      "context_window": 128000,
1566      "default_max_tokens": 64000,
1567      "can_reason": true,
1568      "reasoning_levels": [
1569        "low",
1570        "medium",
1571        "high"
1572      ],
1573      "default_reasoning_effort": "medium",
1574      "supports_attachments": true,
1575      "options": {}
1576    },
1577    {
1578      "id": "nvidia/nemotron-nano-9b-v2",
1579      "name": "NVIDIA: Nemotron Nano 9B V2",
1580      "cost_per_1m_in": 0.04,
1581      "cost_per_1m_out": 0.16,
1582      "cost_per_1m_in_cached": 0,
1583      "cost_per_1m_out_cached": 0,
1584      "context_window": 131072,
1585      "default_max_tokens": 13107,
1586      "can_reason": true,
1587      "reasoning_levels": [
1588        "low",
1589        "medium",
1590        "high"
1591      ],
1592      "default_reasoning_effort": "medium",
1593      "supports_attachments": false,
1594      "options": {}
1595    },
1596    {
1597      "id": "nvidia/nemotron-nano-9b-v2:free",
1598      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1599      "cost_per_1m_in": 0,
1600      "cost_per_1m_out": 0,
1601      "cost_per_1m_in_cached": 0,
1602      "cost_per_1m_out_cached": 0,
1603      "context_window": 128000,
1604      "default_max_tokens": 12800,
1605      "can_reason": true,
1606      "reasoning_levels": [
1607        "low",
1608        "medium",
1609        "high"
1610      ],
1611      "default_reasoning_effort": "medium",
1612      "supports_attachments": false,
1613      "options": {}
1614    },
1615    {
1616      "id": "nousresearch/deephermes-3-mistral-24b-preview",
1617      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
1618      "cost_per_1m_in": 0.05,
1619      "cost_per_1m_out": 0.2,
1620      "cost_per_1m_in_cached": 0,
1621      "cost_per_1m_out_cached": 0,
1622      "context_window": 32768,
1623      "default_max_tokens": 16384,
1624      "can_reason": true,
1625      "reasoning_levels": [
1626        "low",
1627        "medium",
1628        "high"
1629      ],
1630      "default_reasoning_effort": "medium",
1631      "supports_attachments": false,
1632      "options": {}
1633    },
1634    {
1635      "id": "nousresearch/hermes-4-405b",
1636      "name": "Nous: Hermes 4 405B",
1637      "cost_per_1m_in": 0.3,
1638      "cost_per_1m_out": 1.2,
1639      "cost_per_1m_in_cached": 0,
1640      "cost_per_1m_out_cached": 0,
1641      "context_window": 131072,
1642      "default_max_tokens": 65536,
1643      "can_reason": true,
1644      "reasoning_levels": [
1645        "low",
1646        "medium",
1647        "high"
1648      ],
1649      "default_reasoning_effort": "medium",
1650      "supports_attachments": false,
1651      "options": {}
1652    },
1653    {
1654      "id": "openai/codex-mini",
1655      "name": "OpenAI: Codex Mini",
1656      "cost_per_1m_in": 1.5,
1657      "cost_per_1m_out": 6,
1658      "cost_per_1m_in_cached": 0,
1659      "cost_per_1m_out_cached": 0.375,
1660      "context_window": 200000,
1661      "default_max_tokens": 50000,
1662      "can_reason": true,
1663      "reasoning_levels": [
1664        "low",
1665        "medium",
1666        "high"
1667      ],
1668      "default_reasoning_effort": "medium",
1669      "supports_attachments": true,
1670      "options": {}
1671    },
1672    {
1673      "id": "openai/gpt-4-turbo",
1674      "name": "OpenAI: GPT-4 Turbo",
1675      "cost_per_1m_in": 10,
1676      "cost_per_1m_out": 30,
1677      "cost_per_1m_in_cached": 0,
1678      "cost_per_1m_out_cached": 0,
1679      "context_window": 128000,
1680      "default_max_tokens": 2048,
1681      "can_reason": false,
1682      "supports_attachments": true,
1683      "options": {}
1684    },
1685    {
1686      "id": "openai/gpt-4-1106-preview",
1687      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1688      "cost_per_1m_in": 10,
1689      "cost_per_1m_out": 30,
1690      "cost_per_1m_in_cached": 0,
1691      "cost_per_1m_out_cached": 0,
1692      "context_window": 128000,
1693      "default_max_tokens": 2048,
1694      "can_reason": false,
1695      "supports_attachments": false,
1696      "options": {}
1697    },
1698    {
1699      "id": "openai/gpt-4-turbo-preview",
1700      "name": "OpenAI: GPT-4 Turbo Preview",
1701      "cost_per_1m_in": 10,
1702      "cost_per_1m_out": 30,
1703      "cost_per_1m_in_cached": 0,
1704      "cost_per_1m_out_cached": 0,
1705      "context_window": 128000,
1706      "default_max_tokens": 2048,
1707      "can_reason": false,
1708      "supports_attachments": false,
1709      "options": {}
1710    },
1711    {
1712      "id": "openai/gpt-4.1",
1713      "name": "OpenAI: GPT-4.1",
1714      "cost_per_1m_in": 2,
1715      "cost_per_1m_out": 8,
1716      "cost_per_1m_in_cached": 0,
1717      "cost_per_1m_out_cached": 0.5,
1718      "context_window": 1047576,
1719      "default_max_tokens": 104757,
1720      "can_reason": false,
1721      "supports_attachments": true,
1722      "options": {}
1723    },
1724    {
1725      "id": "openai/gpt-4.1-mini",
1726      "name": "OpenAI: GPT-4.1 Mini",
1727      "cost_per_1m_in": 0.4,
1728      "cost_per_1m_out": 1.6,
1729      "cost_per_1m_in_cached": 0,
1730      "cost_per_1m_out_cached": 0.1,
1731      "context_window": 1047576,
1732      "default_max_tokens": 104757,
1733      "can_reason": false,
1734      "supports_attachments": true,
1735      "options": {}
1736    },
1737    {
1738      "id": "openai/gpt-4.1-nano",
1739      "name": "OpenAI: GPT-4.1 Nano",
1740      "cost_per_1m_in": 0.1,
1741      "cost_per_1m_out": 0.4,
1742      "cost_per_1m_in_cached": 0,
1743      "cost_per_1m_out_cached": 0.03,
1744      "context_window": 1047576,
1745      "default_max_tokens": 104757,
1746      "can_reason": false,
1747      "supports_attachments": true,
1748      "options": {}
1749    },
1750    {
1751      "id": "openai/gpt-4o",
1752      "name": "OpenAI: GPT-4o",
1753      "cost_per_1m_in": 2.5,
1754      "cost_per_1m_out": 10,
1755      "cost_per_1m_in_cached": 0,
1756      "cost_per_1m_out_cached": 0,
1757      "context_window": 128000,
1758      "default_max_tokens": 8192,
1759      "can_reason": false,
1760      "supports_attachments": true,
1761      "options": {}
1762    },
1763    {
1764      "id": "openai/gpt-4o-2024-05-13",
1765      "name": "OpenAI: GPT-4o (2024-05-13)",
1766      "cost_per_1m_in": 5,
1767      "cost_per_1m_out": 15,
1768      "cost_per_1m_in_cached": 0,
1769      "cost_per_1m_out_cached": 0,
1770      "context_window": 128000,
1771      "default_max_tokens": 2048,
1772      "can_reason": false,
1773      "supports_attachments": true,
1774      "options": {}
1775    },
1776    {
1777      "id": "openai/gpt-4o-2024-08-06",
1778      "name": "OpenAI: GPT-4o (2024-08-06)",
1779      "cost_per_1m_in": 2.5,
1780      "cost_per_1m_out": 10,
1781      "cost_per_1m_in_cached": 0,
1782      "cost_per_1m_out_cached": 1.25,
1783      "context_window": 128000,
1784      "default_max_tokens": 8192,
1785      "can_reason": false,
1786      "supports_attachments": true,
1787      "options": {}
1788    },
1789    {
1790      "id": "openai/gpt-4o-2024-11-20",
1791      "name": "OpenAI: GPT-4o (2024-11-20)",
1792      "cost_per_1m_in": 2.5,
1793      "cost_per_1m_out": 10,
1794      "cost_per_1m_in_cached": 0,
1795      "cost_per_1m_out_cached": 1.25,
1796      "context_window": 128000,
1797      "default_max_tokens": 8192,
1798      "can_reason": false,
1799      "supports_attachments": true,
1800      "options": {}
1801    },
1802    {
1803      "id": "openai/gpt-4o:extended",
1804      "name": "OpenAI: GPT-4o (extended)",
1805      "cost_per_1m_in": 6,
1806      "cost_per_1m_out": 18,
1807      "cost_per_1m_in_cached": 0,
1808      "cost_per_1m_out_cached": 0,
1809      "context_window": 128000,
1810      "default_max_tokens": 32000,
1811      "can_reason": false,
1812      "supports_attachments": true,
1813      "options": {}
1814    },
1815    {
1816      "id": "openai/gpt-4o-audio-preview",
1817      "name": "OpenAI: GPT-4o Audio",
1818      "cost_per_1m_in": 2.5,
1819      "cost_per_1m_out": 10,
1820      "cost_per_1m_in_cached": 0,
1821      "cost_per_1m_out_cached": 0,
1822      "context_window": 128000,
1823      "default_max_tokens": 8192,
1824      "can_reason": false,
1825      "supports_attachments": false,
1826      "options": {}
1827    },
1828    {
1829      "id": "openai/gpt-4o-mini",
1830      "name": "OpenAI: GPT-4o-mini",
1831      "cost_per_1m_in": 0.15,
1832      "cost_per_1m_out": 0.6,
1833      "cost_per_1m_in_cached": 0,
1834      "cost_per_1m_out_cached": 0.075,
1835      "context_window": 128000,
1836      "default_max_tokens": 8192,
1837      "can_reason": false,
1838      "supports_attachments": true,
1839      "options": {}
1840    },
1841    {
1842      "id": "openai/gpt-4o-mini-2024-07-18",
1843      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1844      "cost_per_1m_in": 0.15,
1845      "cost_per_1m_out": 0.6,
1846      "cost_per_1m_in_cached": 0,
1847      "cost_per_1m_out_cached": 0.075,
1848      "context_window": 128000,
1849      "default_max_tokens": 8192,
1850      "can_reason": false,
1851      "supports_attachments": true,
1852      "options": {}
1853    },
1854    {
1855      "id": "openai/gpt-5",
1856      "name": "OpenAI: GPT-5",
1857      "cost_per_1m_in": 1.25,
1858      "cost_per_1m_out": 10,
1859      "cost_per_1m_in_cached": 0,
1860      "cost_per_1m_out_cached": 0.125,
1861      "context_window": 400000,
1862      "default_max_tokens": 64000,
1863      "can_reason": true,
1864      "reasoning_levels": [
1865        "low",
1866        "medium",
1867        "high"
1868      ],
1869      "default_reasoning_effort": "medium",
1870      "supports_attachments": true,
1871      "options": {}
1872    },
1873    {
1874      "id": "openai/gpt-5-codex",
1875      "name": "OpenAI: GPT-5 Codex",
1876      "cost_per_1m_in": 1.25,
1877      "cost_per_1m_out": 10,
1878      "cost_per_1m_in_cached": 0,
1879      "cost_per_1m_out_cached": 0.125,
1880      "context_window": 400000,
1881      "default_max_tokens": 64000,
1882      "can_reason": true,
1883      "reasoning_levels": [
1884        "low",
1885        "medium",
1886        "high"
1887      ],
1888      "default_reasoning_effort": "medium",
1889      "supports_attachments": true,
1890      "options": {}
1891    },
1892    {
1893      "id": "openai/gpt-5-image",
1894      "name": "OpenAI: GPT-5 Image",
1895      "cost_per_1m_in": 10,
1896      "cost_per_1m_out": 10,
1897      "cost_per_1m_in_cached": 0,
1898      "cost_per_1m_out_cached": 1.25,
1899      "context_window": 400000,
1900      "default_max_tokens": 64000,
1901      "can_reason": true,
1902      "reasoning_levels": [
1903        "low",
1904        "medium",
1905        "high"
1906      ],
1907      "default_reasoning_effort": "medium",
1908      "supports_attachments": true,
1909      "options": {}
1910    },
1911    {
1912      "id": "openai/gpt-5-image-mini",
1913      "name": "OpenAI: GPT-5 Image Mini",
1914      "cost_per_1m_in": 2.5,
1915      "cost_per_1m_out": 2,
1916      "cost_per_1m_in_cached": 0,
1917      "cost_per_1m_out_cached": 0.25,
1918      "context_window": 400000,
1919      "default_max_tokens": 64000,
1920      "can_reason": true,
1921      "reasoning_levels": [
1922        "low",
1923        "medium",
1924        "high"
1925      ],
1926      "default_reasoning_effort": "medium",
1927      "supports_attachments": true,
1928      "options": {}
1929    },
1930    {
1931      "id": "openai/gpt-5-mini",
1932      "name": "OpenAI: GPT-5 Mini",
1933      "cost_per_1m_in": 0.25,
1934      "cost_per_1m_out": 2,
1935      "cost_per_1m_in_cached": 0,
1936      "cost_per_1m_out_cached": 0.03,
1937      "context_window": 400000,
1938      "default_max_tokens": 40000,
1939      "can_reason": true,
1940      "reasoning_levels": [
1941        "low",
1942        "medium",
1943        "high"
1944      ],
1945      "default_reasoning_effort": "medium",
1946      "supports_attachments": true,
1947      "options": {}
1948    },
1949    {
1950      "id": "openai/gpt-5-nano",
1951      "name": "OpenAI: GPT-5 Nano",
1952      "cost_per_1m_in": 0.05,
1953      "cost_per_1m_out": 0.4,
1954      "cost_per_1m_in_cached": 0,
1955      "cost_per_1m_out_cached": 0.01,
1956      "context_window": 400000,
1957      "default_max_tokens": 40000,
1958      "can_reason": true,
1959      "reasoning_levels": [
1960        "low",
1961        "medium",
1962        "high"
1963      ],
1964      "default_reasoning_effort": "medium",
1965      "supports_attachments": true,
1966      "options": {}
1967    },
1968    {
1969      "id": "openai/gpt-5-pro",
1970      "name": "OpenAI: GPT-5 Pro",
1971      "cost_per_1m_in": 15,
1972      "cost_per_1m_out": 120,
1973      "cost_per_1m_in_cached": 0,
1974      "cost_per_1m_out_cached": 0,
1975      "context_window": 400000,
1976      "default_max_tokens": 64000,
1977      "can_reason": true,
1978      "reasoning_levels": [
1979        "low",
1980        "medium",
1981        "high"
1982      ],
1983      "default_reasoning_effort": "medium",
1984      "supports_attachments": true,
1985      "options": {}
1986    },
1987    {
1988      "id": "openai/gpt-5.1",
1989      "name": "OpenAI: GPT-5.1",
1990      "cost_per_1m_in": 1.25,
1991      "cost_per_1m_out": 10,
1992      "cost_per_1m_in_cached": 0,
1993      "cost_per_1m_out_cached": 0.125,
1994      "context_window": 400000,
1995      "default_max_tokens": 64000,
1996      "can_reason": true,
1997      "reasoning_levels": [
1998        "low",
1999        "medium",
2000        "high"
2001      ],
2002      "default_reasoning_effort": "medium",
2003      "supports_attachments": true,
2004      "options": {}
2005    },
2006    {
2007      "id": "openai/gpt-5.1-chat",
2008      "name": "OpenAI: GPT-5.1 Chat",
2009      "cost_per_1m_in": 1.25,
2010      "cost_per_1m_out": 10,
2011      "cost_per_1m_in_cached": 0,
2012      "cost_per_1m_out_cached": 0.125,
2013      "context_window": 128000,
2014      "default_max_tokens": 8192,
2015      "can_reason": false,
2016      "supports_attachments": true,
2017      "options": {}
2018    },
2019    {
2020      "id": "openai/gpt-5.1-codex",
2021      "name": "OpenAI: GPT-5.1-Codex",
2022      "cost_per_1m_in": 1.25,
2023      "cost_per_1m_out": 10,
2024      "cost_per_1m_in_cached": 0,
2025      "cost_per_1m_out_cached": 0.125,
2026      "context_window": 400000,
2027      "default_max_tokens": 64000,
2028      "can_reason": true,
2029      "reasoning_levels": [
2030        "low",
2031        "medium",
2032        "high"
2033      ],
2034      "default_reasoning_effort": "medium",
2035      "supports_attachments": true,
2036      "options": {}
2037    },
2038    {
2039      "id": "openai/gpt-5.1-codex-mini",
2040      "name": "OpenAI: GPT-5.1-Codex-Mini",
2041      "cost_per_1m_in": 0.25,
2042      "cost_per_1m_out": 2,
2043      "cost_per_1m_in_cached": 0,
2044      "cost_per_1m_out_cached": 0.025,
2045      "context_window": 400000,
2046      "default_max_tokens": 50000,
2047      "can_reason": true,
2048      "reasoning_levels": [
2049        "low",
2050        "medium",
2051        "high"
2052      ],
2053      "default_reasoning_effort": "medium",
2054      "supports_attachments": true,
2055      "options": {}
2056    },
2057    {
2058      "id": "openai/gpt-oss-120b",
2059      "name": "OpenAI: gpt-oss-120b",
2060      "cost_per_1m_in": 0.04,
2061      "cost_per_1m_out": 0.4,
2062      "cost_per_1m_in_cached": 0,
2063      "cost_per_1m_out_cached": 0,
2064      "context_window": 131072,
2065      "default_max_tokens": 65536,
2066      "can_reason": true,
2067      "reasoning_levels": [
2068        "low",
2069        "medium",
2070        "high"
2071      ],
2072      "default_reasoning_effort": "medium",
2073      "supports_attachments": false,
2074      "options": {}
2075    },
2076    {
2077      "id": "openai/gpt-oss-120b:exacto",
2078      "name": "OpenAI: gpt-oss-120b (exacto)",
2079      "cost_per_1m_in": 0.04,
2080      "cost_per_1m_out": 0.2,
2081      "cost_per_1m_in_cached": 0,
2082      "cost_per_1m_out_cached": 0,
2083      "context_window": 131072,
2084      "default_max_tokens": 16384,
2085      "can_reason": true,
2086      "reasoning_levels": [
2087        "low",
2088        "medium",
2089        "high"
2090      ],
2091      "default_reasoning_effort": "medium",
2092      "supports_attachments": false,
2093      "options": {}
2094    },
2095    {
2096      "id": "openai/gpt-oss-20b",
2097      "name": "OpenAI: gpt-oss-20b",
2098      "cost_per_1m_in": 0.03,
2099      "cost_per_1m_out": 0.14,
2100      "cost_per_1m_in_cached": 0,
2101      "cost_per_1m_out_cached": 0,
2102      "context_window": 131072,
2103      "default_max_tokens": 13107,
2104      "can_reason": true,
2105      "reasoning_levels": [
2106        "low",
2107        "medium",
2108        "high"
2109      ],
2110      "default_reasoning_effort": "medium",
2111      "supports_attachments": false,
2112      "options": {}
2113    },
2114    {
2115      "id": "openai/gpt-oss-20b:free",
2116      "name": "OpenAI: gpt-oss-20b (free)",
2117      "cost_per_1m_in": 0,
2118      "cost_per_1m_out": 0,
2119      "cost_per_1m_in_cached": 0,
2120      "cost_per_1m_out_cached": 0,
2121      "context_window": 131072,
2122      "default_max_tokens": 65536,
2123      "can_reason": true,
2124      "reasoning_levels": [
2125        "low",
2126        "medium",
2127        "high"
2128      ],
2129      "default_reasoning_effort": "medium",
2130      "supports_attachments": false,
2131      "options": {}
2132    },
2133    {
2134      "id": "openai/gpt-oss-safeguard-20b",
2135      "name": "OpenAI: gpt-oss-safeguard-20b",
2136      "cost_per_1m_in": 0.075,
2137      "cost_per_1m_out": 0.3,
2138      "cost_per_1m_in_cached": 0,
2139      "cost_per_1m_out_cached": 0.037,
2140      "context_window": 131072,
2141      "default_max_tokens": 32768,
2142      "can_reason": true,
2143      "reasoning_levels": [
2144        "low",
2145        "medium",
2146        "high"
2147      ],
2148      "default_reasoning_effort": "medium",
2149      "supports_attachments": false,
2150      "options": {}
2151    },
2152    {
2153      "id": "openai/o1",
2154      "name": "OpenAI: o1",
2155      "cost_per_1m_in": 15,
2156      "cost_per_1m_out": 60,
2157      "cost_per_1m_in_cached": 0,
2158      "cost_per_1m_out_cached": 7.5,
2159      "context_window": 200000,
2160      "default_max_tokens": 50000,
2161      "can_reason": false,
2162      "supports_attachments": true,
2163      "options": {}
2164    },
2165    {
2166      "id": "openai/o3",
2167      "name": "OpenAI: o3",
2168      "cost_per_1m_in": 2,
2169      "cost_per_1m_out": 8,
2170      "cost_per_1m_in_cached": 0,
2171      "cost_per_1m_out_cached": 0.5,
2172      "context_window": 200000,
2173      "default_max_tokens": 50000,
2174      "can_reason": true,
2175      "reasoning_levels": [
2176        "low",
2177        "medium",
2178        "high"
2179      ],
2180      "default_reasoning_effort": "medium",
2181      "supports_attachments": true,
2182      "options": {}
2183    },
2184    {
2185      "id": "openai/o3-deep-research",
2186      "name": "OpenAI: o3 Deep Research",
2187      "cost_per_1m_in": 10,
2188      "cost_per_1m_out": 40,
2189      "cost_per_1m_in_cached": 0,
2190      "cost_per_1m_out_cached": 2.5,
2191      "context_window": 200000,
2192      "default_max_tokens": 50000,
2193      "can_reason": true,
2194      "reasoning_levels": [
2195        "low",
2196        "medium",
2197        "high"
2198      ],
2199      "default_reasoning_effort": "medium",
2200      "supports_attachments": true,
2201      "options": {}
2202    },
2203    {
2204      "id": "openai/o3-mini",
2205      "name": "OpenAI: o3 Mini",
2206      "cost_per_1m_in": 1.1,
2207      "cost_per_1m_out": 4.4,
2208      "cost_per_1m_in_cached": 0,
2209      "cost_per_1m_out_cached": 0.55,
2210      "context_window": 200000,
2211      "default_max_tokens": 50000,
2212      "can_reason": false,
2213      "supports_attachments": false,
2214      "options": {}
2215    },
2216    {
2217      "id": "openai/o3-mini-high",
2218      "name": "OpenAI: o3 Mini High",
2219      "cost_per_1m_in": 1.1,
2220      "cost_per_1m_out": 4.4,
2221      "cost_per_1m_in_cached": 0,
2222      "cost_per_1m_out_cached": 0.55,
2223      "context_window": 200000,
2224      "default_max_tokens": 50000,
2225      "can_reason": false,
2226      "supports_attachments": false,
2227      "options": {}
2228    },
2229    {
2230      "id": "openai/o3-pro",
2231      "name": "OpenAI: o3 Pro",
2232      "cost_per_1m_in": 20,
2233      "cost_per_1m_out": 80,
2234      "cost_per_1m_in_cached": 0,
2235      "cost_per_1m_out_cached": 0,
2236      "context_window": 200000,
2237      "default_max_tokens": 50000,
2238      "can_reason": true,
2239      "reasoning_levels": [
2240        "low",
2241        "medium",
2242        "high"
2243      ],
2244      "default_reasoning_effort": "medium",
2245      "supports_attachments": true,
2246      "options": {}
2247    },
2248    {
2249      "id": "openai/o4-mini",
2250      "name": "OpenAI: o4 Mini",
2251      "cost_per_1m_in": 1.1,
2252      "cost_per_1m_out": 4.4,
2253      "cost_per_1m_in_cached": 0,
2254      "cost_per_1m_out_cached": 0.275,
2255      "context_window": 200000,
2256      "default_max_tokens": 50000,
2257      "can_reason": true,
2258      "reasoning_levels": [
2259        "low",
2260        "medium",
2261        "high"
2262      ],
2263      "default_reasoning_effort": "medium",
2264      "supports_attachments": true,
2265      "options": {}
2266    },
2267    {
2268      "id": "openai/o4-mini-deep-research",
2269      "name": "OpenAI: o4 Mini Deep Research",
2270      "cost_per_1m_in": 2,
2271      "cost_per_1m_out": 8,
2272      "cost_per_1m_in_cached": 0,
2273      "cost_per_1m_out_cached": 0.5,
2274      "context_window": 200000,
2275      "default_max_tokens": 50000,
2276      "can_reason": true,
2277      "reasoning_levels": [
2278        "low",
2279        "medium",
2280        "high"
2281      ],
2282      "default_reasoning_effort": "medium",
2283      "supports_attachments": true,
2284      "options": {}
2285    },
2286    {
2287      "id": "openai/o4-mini-high",
2288      "name": "OpenAI: o4 Mini High",
2289      "cost_per_1m_in": 1.1,
2290      "cost_per_1m_out": 4.4,
2291      "cost_per_1m_in_cached": 0,
2292      "cost_per_1m_out_cached": 0.275,
2293      "context_window": 200000,
2294      "default_max_tokens": 50000,
2295      "can_reason": true,
2296      "reasoning_levels": [
2297        "low",
2298        "medium",
2299        "high"
2300      ],
2301      "default_reasoning_effort": "medium",
2302      "supports_attachments": true,
2303      "options": {}
2304    },
2305    {
2306      "id": "prime-intellect/intellect-3",
2307      "name": "Prime Intellect: INTELLECT-3",
2308      "cost_per_1m_in": 0.2,
2309      "cost_per_1m_out": 1.1,
2310      "cost_per_1m_in_cached": 0,
2311      "cost_per_1m_out_cached": 0,
2312      "context_window": 131072,
2313      "default_max_tokens": 65536,
2314      "can_reason": true,
2315      "reasoning_levels": [
2316        "low",
2317        "medium",
2318        "high"
2319      ],
2320      "default_reasoning_effort": "medium",
2321      "supports_attachments": false,
2322      "options": {}
2323    },
2324    {
2325      "id": "qwen/qwen-2.5-72b-instruct",
2326      "name": "Qwen2.5 72B Instruct",
2327      "cost_per_1m_in": 0.12,
2328      "cost_per_1m_out": 0.39,
2329      "cost_per_1m_in_cached": 0,
2330      "cost_per_1m_out_cached": 0,
2331      "context_window": 32768,
2332      "default_max_tokens": 8192,
2333      "can_reason": false,
2334      "supports_attachments": false,
2335      "options": {}
2336    },
2337    {
2338      "id": "qwen/qwq-32b",
2339      "name": "Qwen: QwQ 32B",
2340      "cost_per_1m_in": 0.15,
2341      "cost_per_1m_out": 0.58,
2342      "cost_per_1m_in_cached": 0,
2343      "cost_per_1m_out_cached": 0,
2344      "context_window": 131072,
2345      "default_max_tokens": 65536,
2346      "can_reason": true,
2347      "reasoning_levels": [
2348        "low",
2349        "medium",
2350        "high"
2351      ],
2352      "default_reasoning_effort": "medium",
2353      "supports_attachments": false,
2354      "options": {}
2355    },
2356    {
2357      "id": "qwen/qwen-plus-2025-07-28",
2358      "name": "Qwen: Qwen Plus 0728",
2359      "cost_per_1m_in": 0.4,
2360      "cost_per_1m_out": 1.2,
2361      "cost_per_1m_in_cached": 0,
2362      "cost_per_1m_out_cached": 0,
2363      "context_window": 1000000,
2364      "default_max_tokens": 16384,
2365      "can_reason": false,
2366      "supports_attachments": false,
2367      "options": {}
2368    },
2369    {
2370      "id": "qwen/qwen-plus-2025-07-28:thinking",
2371      "name": "Qwen: Qwen Plus 0728 (thinking)",
2372      "cost_per_1m_in": 0.4,
2373      "cost_per_1m_out": 4,
2374      "cost_per_1m_in_cached": 0,
2375      "cost_per_1m_out_cached": 0,
2376      "context_window": 1000000,
2377      "default_max_tokens": 16384,
2378      "can_reason": true,
2379      "reasoning_levels": [
2380        "low",
2381        "medium",
2382        "high"
2383      ],
2384      "default_reasoning_effort": "medium",
2385      "supports_attachments": false,
2386      "options": {}
2387    },
2388    {
2389      "id": "qwen/qwen-vl-max",
2390      "name": "Qwen: Qwen VL Max",
2391      "cost_per_1m_in": 0.8,
2392      "cost_per_1m_out": 3.2,
2393      "cost_per_1m_in_cached": 0,
2394      "cost_per_1m_out_cached": 0,
2395      "context_window": 131072,
2396      "default_max_tokens": 4096,
2397      "can_reason": false,
2398      "supports_attachments": true,
2399      "options": {}
2400    },
2401    {
2402      "id": "qwen/qwen-max",
2403      "name": "Qwen: Qwen-Max",
2404      "cost_per_1m_in": 1.6,
2405      "cost_per_1m_out": 6.4,
2406      "cost_per_1m_in_cached": 0,
2407      "cost_per_1m_out_cached": 0.64,
2408      "context_window": 32768,
2409      "default_max_tokens": 4096,
2410      "can_reason": false,
2411      "supports_attachments": false,
2412      "options": {}
2413    },
2414    {
2415      "id": "qwen/qwen-plus",
2416      "name": "Qwen: Qwen-Plus",
2417      "cost_per_1m_in": 0.4,
2418      "cost_per_1m_out": 1.2,
2419      "cost_per_1m_in_cached": 0,
2420      "cost_per_1m_out_cached": 0.16,
2421      "context_window": 131072,
2422      "default_max_tokens": 4096,
2423      "can_reason": false,
2424      "supports_attachments": false,
2425      "options": {}
2426    },
2427    {
2428      "id": "qwen/qwen-turbo",
2429      "name": "Qwen: Qwen-Turbo",
2430      "cost_per_1m_in": 0.05,
2431      "cost_per_1m_out": 0.2,
2432      "cost_per_1m_in_cached": 0,
2433      "cost_per_1m_out_cached": 0.02,
2434      "context_window": 1000000,
2435      "default_max_tokens": 4096,
2436      "can_reason": false,
2437      "supports_attachments": false,
2438      "options": {}
2439    },
2440    {
2441      "id": "qwen/qwen3-14b",
2442      "name": "Qwen: Qwen3 14B",
2443      "cost_per_1m_in": 0.05,
2444      "cost_per_1m_out": 0.22,
2445      "cost_per_1m_in_cached": 0,
2446      "cost_per_1m_out_cached": 0,
2447      "context_window": 40960,
2448      "default_max_tokens": 20480,
2449      "can_reason": true,
2450      "reasoning_levels": [
2451        "low",
2452        "medium",
2453        "high"
2454      ],
2455      "default_reasoning_effort": "medium",
2456      "supports_attachments": false,
2457      "options": {}
2458    },
2459    {
2460      "id": "qwen/qwen3-235b-a22b",
2461      "name": "Qwen: Qwen3 235B A22B",
2462      "cost_per_1m_in": 0.22,
2463      "cost_per_1m_out": 0.88,
2464      "cost_per_1m_in_cached": 0,
2465      "cost_per_1m_out_cached": 0,
2466      "context_window": 131072,
2467      "default_max_tokens": 13107,
2468      "can_reason": true,
2469      "reasoning_levels": [
2470        "low",
2471        "medium",
2472        "high"
2473      ],
2474      "default_reasoning_effort": "medium",
2475      "supports_attachments": false,
2476      "options": {}
2477    },
2478    {
2479      "id": "qwen/qwen3-235b-a22b:free",
2480      "name": "Qwen: Qwen3 235B A22B (free)",
2481      "cost_per_1m_in": 0,
2482      "cost_per_1m_out": 0,
2483      "cost_per_1m_in_cached": 0,
2484      "cost_per_1m_out_cached": 0,
2485      "context_window": 131072,
2486      "default_max_tokens": 13107,
2487      "can_reason": true,
2488      "reasoning_levels": [
2489        "low",
2490        "medium",
2491        "high"
2492      ],
2493      "default_reasoning_effort": "medium",
2494      "supports_attachments": false,
2495      "options": {}
2496    },
2497    {
2498      "id": "qwen/qwen3-235b-a22b-2507",
2499      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2500      "cost_per_1m_in": 0.1,
2501      "cost_per_1m_out": 0.1,
2502      "cost_per_1m_in_cached": 0,
2503      "cost_per_1m_out_cached": 0,
2504      "context_window": 262144,
2505      "default_max_tokens": 131072,
2506      "can_reason": false,
2507      "supports_attachments": false,
2508      "options": {}
2509    },
2510    {
2511      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2512      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2513      "cost_per_1m_in": 0.11,
2514      "cost_per_1m_out": 0.6,
2515      "cost_per_1m_in_cached": 0,
2516      "cost_per_1m_out_cached": 0,
2517      "context_window": 262144,
2518      "default_max_tokens": 131072,
2519      "can_reason": true,
2520      "reasoning_levels": [
2521        "low",
2522        "medium",
2523        "high"
2524      ],
2525      "default_reasoning_effort": "medium",
2526      "supports_attachments": false,
2527      "options": {}
2528    },
2529    {
2530      "id": "qwen/qwen3-30b-a3b",
2531      "name": "Qwen: Qwen3 30B A3B",
2532      "cost_per_1m_in": 0.08,
2533      "cost_per_1m_out": 0.28,
2534      "cost_per_1m_in_cached": 0,
2535      "cost_per_1m_out_cached": 0,
2536      "context_window": 131072,
2537      "default_max_tokens": 65536,
2538      "can_reason": true,
2539      "reasoning_levels": [
2540        "low",
2541        "medium",
2542        "high"
2543      ],
2544      "default_reasoning_effort": "medium",
2545      "supports_attachments": false,
2546      "options": {}
2547    },
2548    {
2549      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2550      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2551      "cost_per_1m_in": 0.08,
2552      "cost_per_1m_out": 0.33,
2553      "cost_per_1m_in_cached": 0,
2554      "cost_per_1m_out_cached": 0,
2555      "context_window": 262144,
2556      "default_max_tokens": 131072,
2557      "can_reason": false,
2558      "supports_attachments": false,
2559      "options": {}
2560    },
2561    {
2562      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2563      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2564      "cost_per_1m_in": 0.1,
2565      "cost_per_1m_out": 0.3,
2566      "cost_per_1m_in_cached": 0,
2567      "cost_per_1m_out_cached": 0,
2568      "context_window": 262144,
2569      "default_max_tokens": 26214,
2570      "can_reason": true,
2571      "reasoning_levels": [
2572        "low",
2573        "medium",
2574        "high"
2575      ],
2576      "default_reasoning_effort": "medium",
2577      "supports_attachments": false,
2578      "options": {}
2579    },
2580    {
2581      "id": "qwen/qwen3-32b",
2582      "name": "Qwen: Qwen3 32B",
2583      "cost_per_1m_in": 0.15,
2584      "cost_per_1m_out": 0.5,
2585      "cost_per_1m_in_cached": 0,
2586      "cost_per_1m_out_cached": 0,
2587      "context_window": 131072,
2588      "default_max_tokens": 4000,
2589      "can_reason": true,
2590      "reasoning_levels": [
2591        "low",
2592        "medium",
2593        "high"
2594      ],
2595      "default_reasoning_effort": "medium",
2596      "supports_attachments": false,
2597      "options": {}
2598    },
2599    {
2600      "id": "qwen/qwen3-4b:free",
2601      "name": "Qwen: Qwen3 4B (free)",
2602      "cost_per_1m_in": 0,
2603      "cost_per_1m_out": 0,
2604      "cost_per_1m_in_cached": 0,
2605      "cost_per_1m_out_cached": 0,
2606      "context_window": 40960,
2607      "default_max_tokens": 4096,
2608      "can_reason": true,
2609      "reasoning_levels": [
2610        "low",
2611        "medium",
2612        "high"
2613      ],
2614      "default_reasoning_effort": "medium",
2615      "supports_attachments": false,
2616      "options": {}
2617    },
2618    {
2619      "id": "qwen/qwen3-8b",
2620      "name": "Qwen: Qwen3 8B",
2621      "cost_per_1m_in": 0.2,
2622      "cost_per_1m_out": 0.2,
2623      "cost_per_1m_in_cached": 0,
2624      "cost_per_1m_out_cached": 0,
2625      "context_window": 40960,
2626      "default_max_tokens": 4096,
2627      "can_reason": true,
2628      "reasoning_levels": [
2629        "low",
2630        "medium",
2631        "high"
2632      ],
2633      "default_reasoning_effort": "medium",
2634      "supports_attachments": false,
2635      "options": {}
2636    },
2637    {
2638      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2639      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2640      "cost_per_1m_in": 0.1,
2641      "cost_per_1m_out": 0.3,
2642      "cost_per_1m_in_cached": 0,
2643      "cost_per_1m_out_cached": 0,
2644      "context_window": 262144,
2645      "default_max_tokens": 26214,
2646      "can_reason": false,
2647      "supports_attachments": false,
2648      "options": {}
2649    },
2650    {
2651      "id": "qwen/qwen3-coder",
2652      "name": "Qwen: Qwen3 Coder 480B A35B",
2653      "cost_per_1m_in": 0.29,
2654      "cost_per_1m_out": 1.2,
2655      "cost_per_1m_in_cached": 0,
2656      "cost_per_1m_out_cached": 0,
2657      "context_window": 262144,
2658      "default_max_tokens": 26214,
2659      "can_reason": false,
2660      "supports_attachments": false,
2661      "options": {}
2662    },
2663    {
2664      "id": "qwen/qwen3-coder:exacto",
2665      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2666      "cost_per_1m_in": 0.38,
2667      "cost_per_1m_out": 1.53,
2668      "cost_per_1m_in_cached": 0,
2669      "cost_per_1m_out_cached": 0,
2670      "context_window": 262144,
2671      "default_max_tokens": 131072,
2672      "can_reason": true,
2673      "reasoning_levels": [
2674        "low",
2675        "medium",
2676        "high"
2677      ],
2678      "default_reasoning_effort": "medium",
2679      "supports_attachments": false,
2680      "options": {}
2681    },
2682    {
2683      "id": "qwen/qwen3-coder:free",
2684      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2685      "cost_per_1m_in": 0,
2686      "cost_per_1m_out": 0,
2687      "cost_per_1m_in_cached": 0,
2688      "cost_per_1m_out_cached": 0,
2689      "context_window": 262000,
2690      "default_max_tokens": 131000,
2691      "can_reason": false,
2692      "supports_attachments": false,
2693      "options": {}
2694    },
2695    {
2696      "id": "qwen/qwen3-coder-flash",
2697      "name": "Qwen: Qwen3 Coder Flash",
2698      "cost_per_1m_in": 0.3,
2699      "cost_per_1m_out": 1.5,
2700      "cost_per_1m_in_cached": 0,
2701      "cost_per_1m_out_cached": 0.08,
2702      "context_window": 128000,
2703      "default_max_tokens": 32768,
2704      "can_reason": false,
2705      "supports_attachments": false,
2706      "options": {}
2707    },
2708    {
2709      "id": "qwen/qwen3-coder-plus",
2710      "name": "Qwen: Qwen3 Coder Plus",
2711      "cost_per_1m_in": 1,
2712      "cost_per_1m_out": 5,
2713      "cost_per_1m_in_cached": 0,
2714      "cost_per_1m_out_cached": 0.1,
2715      "context_window": 128000,
2716      "default_max_tokens": 32768,
2717      "can_reason": false,
2718      "supports_attachments": false,
2719      "options": {}
2720    },
2721    {
2722      "id": "qwen/qwen3-max",
2723      "name": "Qwen: Qwen3 Max",
2724      "cost_per_1m_in": 1.2,
2725      "cost_per_1m_out": 6,
2726      "cost_per_1m_in_cached": 0,
2727      "cost_per_1m_out_cached": 0.24,
2728      "context_window": 256000,
2729      "default_max_tokens": 16384,
2730      "can_reason": false,
2731      "supports_attachments": false,
2732      "options": {}
2733    },
2734    {
2735      "id": "qwen/qwen3-next-80b-a3b-instruct",
2736      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2737      "cost_per_1m_in": 0.14,
2738      "cost_per_1m_out": 1.1,
2739      "cost_per_1m_in_cached": 0,
2740      "cost_per_1m_out_cached": 0,
2741      "context_window": 262144,
2742      "default_max_tokens": 26214,
2743      "can_reason": false,
2744      "supports_attachments": false,
2745      "options": {}
2746    },
2747    {
2748      "id": "qwen/qwen3-next-80b-a3b-thinking",
2749      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2750      "cost_per_1m_in": 0.15,
2751      "cost_per_1m_out": 1.2,
2752      "cost_per_1m_in_cached": 0,
2753      "cost_per_1m_out_cached": 0,
2754      "context_window": 262144,
2755      "default_max_tokens": 131072,
2756      "can_reason": true,
2757      "reasoning_levels": [
2758        "low",
2759        "medium",
2760        "high"
2761      ],
2762      "default_reasoning_effort": "medium",
2763      "supports_attachments": false,
2764      "options": {}
2765    },
2766    {
2767      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2768      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2769      "cost_per_1m_in": 0.22,
2770      "cost_per_1m_out": 0.88,
2771      "cost_per_1m_in_cached": 0,
2772      "cost_per_1m_out_cached": 0,
2773      "context_window": 262144,
2774      "default_max_tokens": 26214,
2775      "can_reason": false,
2776      "supports_attachments": true,
2777      "options": {}
2778    },
2779    {
2780      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2781      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2782      "cost_per_1m_in": 0.3,
2783      "cost_per_1m_out": 1.2,
2784      "cost_per_1m_in_cached": 0,
2785      "cost_per_1m_out_cached": 0,
2786      "context_window": 262144,
2787      "default_max_tokens": 131072,
2788      "can_reason": true,
2789      "reasoning_levels": [
2790        "low",
2791        "medium",
2792        "high"
2793      ],
2794      "default_reasoning_effort": "medium",
2795      "supports_attachments": true,
2796      "options": {}
2797    },
2798    {
2799      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2800      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2801      "cost_per_1m_in": 0.15,
2802      "cost_per_1m_out": 0.6,
2803      "cost_per_1m_in_cached": 0,
2804      "cost_per_1m_out_cached": 0,
2805      "context_window": 262144,
2806      "default_max_tokens": 26214,
2807      "can_reason": false,
2808      "supports_attachments": true,
2809      "options": {}
2810    },
2811    {
2812      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2813      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2814      "cost_per_1m_in": 0.16,
2815      "cost_per_1m_out": 0.8,
2816      "cost_per_1m_in_cached": 0,
2817      "cost_per_1m_out_cached": 0,
2818      "context_window": 131072,
2819      "default_max_tokens": 16384,
2820      "can_reason": true,
2821      "reasoning_levels": [
2822        "low",
2823        "medium",
2824        "high"
2825      ],
2826      "default_reasoning_effort": "medium",
2827      "supports_attachments": true,
2828      "options": {}
2829    },
2830    {
2831      "id": "qwen/qwen3-vl-8b-instruct",
2832      "name": "Qwen: Qwen3 VL 8B Instruct",
2833      "cost_per_1m_in": 0.18,
2834      "cost_per_1m_out": 0.7,
2835      "cost_per_1m_in_cached": 0,
2836      "cost_per_1m_out_cached": 0,
2837      "context_window": 256000,
2838      "default_max_tokens": 16384,
2839      "can_reason": false,
2840      "supports_attachments": true,
2841      "options": {}
2842    },
2843    {
2844      "id": "qwen/qwen3-vl-8b-thinking",
2845      "name": "Qwen: Qwen3 VL 8B Thinking",
2846      "cost_per_1m_in": 0.18,
2847      "cost_per_1m_out": 2.1,
2848      "cost_per_1m_in_cached": 0,
2849      "cost_per_1m_out_cached": 0,
2850      "context_window": 256000,
2851      "default_max_tokens": 16384,
2852      "can_reason": true,
2853      "reasoning_levels": [
2854        "low",
2855        "medium",
2856        "high"
2857      ],
2858      "default_reasoning_effort": "medium",
2859      "supports_attachments": true,
2860      "options": {}
2861    },
2862    {
2863      "id": "stepfun-ai/step3",
2864      "name": "StepFun: Step3",
2865      "cost_per_1m_in": 0.57,
2866      "cost_per_1m_out": 1.42,
2867      "cost_per_1m_in_cached": 0,
2868      "cost_per_1m_out_cached": 0,
2869      "context_window": 65536,
2870      "default_max_tokens": 32768,
2871      "can_reason": true,
2872      "reasoning_levels": [
2873        "low",
2874        "medium",
2875        "high"
2876      ],
2877      "default_reasoning_effort": "medium",
2878      "supports_attachments": true,
2879      "options": {}
2880    },
2881    {
2882      "id": "tngtech/deepseek-r1t2-chimera",
2883      "name": "TNG: DeepSeek R1T2 Chimera",
2884      "cost_per_1m_in": 0.3,
2885      "cost_per_1m_out": 1.2,
2886      "cost_per_1m_in_cached": 0,
2887      "cost_per_1m_out_cached": 0,
2888      "context_window": 163840,
2889      "default_max_tokens": 81920,
2890      "can_reason": true,
2891      "reasoning_levels": [
2892        "low",
2893        "medium",
2894        "high"
2895      ],
2896      "default_reasoning_effort": "medium",
2897      "supports_attachments": false,
2898      "options": {}
2899    },
2900    {
2901      "id": "tngtech/tng-r1t-chimera",
2902      "name": "TNG: R1T Chimera",
2903      "cost_per_1m_in": 0.3,
2904      "cost_per_1m_out": 1.2,
2905      "cost_per_1m_in_cached": 0,
2906      "cost_per_1m_out_cached": 0,
2907      "context_window": 163840,
2908      "default_max_tokens": 81920,
2909      "can_reason": true,
2910      "reasoning_levels": [
2911        "low",
2912        "medium",
2913        "high"
2914      ],
2915      "default_reasoning_effort": "medium",
2916      "supports_attachments": false,
2917      "options": {}
2918    },
2919    {
2920      "id": "tngtech/tng-r1t-chimera:free",
2921      "name": "TNG: R1T Chimera (free)",
2922      "cost_per_1m_in": 0,
2923      "cost_per_1m_out": 0,
2924      "cost_per_1m_in_cached": 0,
2925      "cost_per_1m_out_cached": 0,
2926      "context_window": 163840,
2927      "default_max_tokens": 81920,
2928      "can_reason": true,
2929      "reasoning_levels": [
2930        "low",
2931        "medium",
2932        "high"
2933      ],
2934      "default_reasoning_effort": "medium",
2935      "supports_attachments": false,
2936      "options": {}
2937    },
2938    {
2939      "id": "thedrummer/rocinante-12b",
2940      "name": "TheDrummer: Rocinante 12B",
2941      "cost_per_1m_in": 0.17,
2942      "cost_per_1m_out": 0.43,
2943      "cost_per_1m_in_cached": 0,
2944      "cost_per_1m_out_cached": 0,
2945      "context_window": 32768,
2946      "default_max_tokens": 3276,
2947      "can_reason": false,
2948      "supports_attachments": false,
2949      "options": {}
2950    },
2951    {
2952      "id": "thedrummer/unslopnemo-12b",
2953      "name": "TheDrummer: UnslopNemo 12B",
2954      "cost_per_1m_in": 0.4,
2955      "cost_per_1m_out": 0.4,
2956      "cost_per_1m_in_cached": 0,
2957      "cost_per_1m_out_cached": 0,
2958      "context_window": 32768,
2959      "default_max_tokens": 3276,
2960      "can_reason": false,
2961      "supports_attachments": false,
2962      "options": {}
2963    },
2964    {
2965      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2966      "name": "Tongyi DeepResearch 30B A3B",
2967      "cost_per_1m_in": 0.09,
2968      "cost_per_1m_out": 0.4,
2969      "cost_per_1m_in_cached": 0,
2970      "cost_per_1m_out_cached": 0,
2971      "context_window": 131072,
2972      "default_max_tokens": 65536,
2973      "can_reason": true,
2974      "reasoning_levels": [
2975        "low",
2976        "medium",
2977        "high"
2978      ],
2979      "default_reasoning_effort": "medium",
2980      "supports_attachments": false,
2981      "options": {}
2982    },
2983    {
2984      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2985      "name": "Tongyi DeepResearch 30B A3B (free)",
2986      "cost_per_1m_in": 0,
2987      "cost_per_1m_out": 0,
2988      "cost_per_1m_in_cached": 0,
2989      "cost_per_1m_out_cached": 0,
2990      "context_window": 131072,
2991      "default_max_tokens": 65536,
2992      "can_reason": true,
2993      "reasoning_levels": [
2994        "low",
2995        "medium",
2996        "high"
2997      ],
2998      "default_reasoning_effort": "medium",
2999      "supports_attachments": false,
3000      "options": {}
3001    },
3002    {
3003      "id": "z-ai/glm-4-32b",
3004      "name": "Z.AI: GLM 4 32B",
3005      "cost_per_1m_in": 0.1,
3006      "cost_per_1m_out": 0.1,
3007      "cost_per_1m_in_cached": 0,
3008      "cost_per_1m_out_cached": 0,
3009      "context_window": 128000,
3010      "default_max_tokens": 12800,
3011      "can_reason": false,
3012      "supports_attachments": false,
3013      "options": {}
3014    },
3015    {
3016      "id": "z-ai/glm-4.5",
3017      "name": "Z.AI: GLM 4.5",
3018      "cost_per_1m_in": 0.6,
3019      "cost_per_1m_out": 2.2,
3020      "cost_per_1m_in_cached": 0,
3021      "cost_per_1m_out_cached": 0.11,
3022      "context_window": 131072,
3023      "default_max_tokens": 48000,
3024      "can_reason": true,
3025      "reasoning_levels": [
3026        "low",
3027        "medium",
3028        "high"
3029      ],
3030      "default_reasoning_effort": "medium",
3031      "supports_attachments": false,
3032      "options": {}
3033    },
3034    {
3035      "id": "z-ai/glm-4.5-air",
3036      "name": "Z.AI: GLM 4.5 Air",
3037      "cost_per_1m_in": 0.104,
3038      "cost_per_1m_out": 0.68,
3039      "cost_per_1m_in_cached": 0,
3040      "cost_per_1m_out_cached": 0,
3041      "context_window": 131072,
3042      "default_max_tokens": 49152,
3043      "can_reason": true,
3044      "reasoning_levels": [
3045        "low",
3046        "medium",
3047        "high"
3048      ],
3049      "default_reasoning_effort": "medium",
3050      "supports_attachments": false,
3051      "options": {}
3052    },
3053    {
3054      "id": "z-ai/glm-4.5-air:free",
3055      "name": "Z.AI: GLM 4.5 Air (free)",
3056      "cost_per_1m_in": 0,
3057      "cost_per_1m_out": 0,
3058      "cost_per_1m_in_cached": 0,
3059      "cost_per_1m_out_cached": 0,
3060      "context_window": 131072,
3061      "default_max_tokens": 48000,
3062      "can_reason": true,
3063      "reasoning_levels": [
3064        "low",
3065        "medium",
3066        "high"
3067      ],
3068      "default_reasoning_effort": "medium",
3069      "supports_attachments": false,
3070      "options": {}
3071    },
3072    {
3073      "id": "z-ai/glm-4.5v",
3074      "name": "Z.AI: GLM 4.5V",
3075      "cost_per_1m_in": 0.48,
3076      "cost_per_1m_out": 1.44,
3077      "cost_per_1m_in_cached": 0,
3078      "cost_per_1m_out_cached": 0.088,
3079      "context_window": 65536,
3080      "default_max_tokens": 8192,
3081      "can_reason": true,
3082      "reasoning_levels": [
3083        "low",
3084        "medium",
3085        "high"
3086      ],
3087      "default_reasoning_effort": "medium",
3088      "supports_attachments": true,
3089      "options": {}
3090    },
3091    {
3092      "id": "z-ai/glm-4.6",
3093      "name": "Z.AI: GLM 4.6",
3094      "cost_per_1m_in": 0.5,
3095      "cost_per_1m_out": 1.9,
3096      "cost_per_1m_in_cached": 0,
3097      "cost_per_1m_out_cached": 0,
3098      "context_window": 204800,
3099      "default_max_tokens": 102400,
3100      "can_reason": true,
3101      "reasoning_levels": [
3102        "low",
3103        "medium",
3104        "high"
3105      ],
3106      "default_reasoning_effort": "medium",
3107      "supports_attachments": false,
3108      "options": {}
3109    },
3110    {
3111      "id": "z-ai/glm-4.6:exacto",
3112      "name": "Z.AI: GLM 4.6 (exacto)",
3113      "cost_per_1m_in": 0.44,
3114      "cost_per_1m_out": 1.76,
3115      "cost_per_1m_in_cached": 0,
3116      "cost_per_1m_out_cached": 0,
3117      "context_window": 204800,
3118      "default_max_tokens": 65536,
3119      "can_reason": true,
3120      "reasoning_levels": [
3121        "low",
3122        "medium",
3123        "high"
3124      ],
3125      "default_reasoning_effort": "medium",
3126      "supports_attachments": false,
3127      "options": {}
3128    },
3129    {
3130      "id": "x-ai/grok-3",
3131      "name": "xAI: Grok 3",
3132      "cost_per_1m_in": 3,
3133      "cost_per_1m_out": 15,
3134      "cost_per_1m_in_cached": 0,
3135      "cost_per_1m_out_cached": 0.75,
3136      "context_window": 131072,
3137      "default_max_tokens": 13107,
3138      "can_reason": false,
3139      "supports_attachments": false,
3140      "options": {}
3141    },
3142    {
3143      "id": "x-ai/grok-3-beta",
3144      "name": "xAI: Grok 3 Beta",
3145      "cost_per_1m_in": 3,
3146      "cost_per_1m_out": 15,
3147      "cost_per_1m_in_cached": 0,
3148      "cost_per_1m_out_cached": 0.75,
3149      "context_window": 131072,
3150      "default_max_tokens": 13107,
3151      "can_reason": false,
3152      "supports_attachments": false,
3153      "options": {}
3154    },
3155    {
3156      "id": "x-ai/grok-3-mini",
3157      "name": "xAI: Grok 3 Mini",
3158      "cost_per_1m_in": 0.6,
3159      "cost_per_1m_out": 4,
3160      "cost_per_1m_in_cached": 0,
3161      "cost_per_1m_out_cached": 0.15,
3162      "context_window": 131072,
3163      "default_max_tokens": 13107,
3164      "can_reason": true,
3165      "reasoning_levels": [
3166        "low",
3167        "medium",
3168        "high"
3169      ],
3170      "default_reasoning_effort": "medium",
3171      "supports_attachments": false,
3172      "options": {}
3173    },
3174    {
3175      "id": "x-ai/grok-3-mini-beta",
3176      "name": "xAI: Grok 3 Mini Beta",
3177      "cost_per_1m_in": 0.6,
3178      "cost_per_1m_out": 4,
3179      "cost_per_1m_in_cached": 0,
3180      "cost_per_1m_out_cached": 0.15,
3181      "context_window": 131072,
3182      "default_max_tokens": 13107,
3183      "can_reason": true,
3184      "reasoning_levels": [
3185        "low",
3186        "medium",
3187        "high"
3188      ],
3189      "default_reasoning_effort": "medium",
3190      "supports_attachments": false,
3191      "options": {}
3192    },
3193    {
3194      "id": "x-ai/grok-4",
3195      "name": "xAI: Grok 4",
3196      "cost_per_1m_in": 3,
3197      "cost_per_1m_out": 15,
3198      "cost_per_1m_in_cached": 0,
3199      "cost_per_1m_out_cached": 0.75,
3200      "context_window": 256000,
3201      "default_max_tokens": 25600,
3202      "can_reason": true,
3203      "reasoning_levels": [
3204        "low",
3205        "medium",
3206        "high"
3207      ],
3208      "default_reasoning_effort": "medium",
3209      "supports_attachments": true,
3210      "options": {}
3211    },
3212    {
3213      "id": "x-ai/grok-4-fast",
3214      "name": "xAI: Grok 4 Fast",
3215      "cost_per_1m_in": 0.2,
3216      "cost_per_1m_out": 0.5,
3217      "cost_per_1m_in_cached": 0,
3218      "cost_per_1m_out_cached": 0.05,
3219      "context_window": 2000000,
3220      "default_max_tokens": 15000,
3221      "can_reason": true,
3222      "reasoning_levels": [
3223        "low",
3224        "medium",
3225        "high"
3226      ],
3227      "default_reasoning_effort": "medium",
3228      "supports_attachments": true,
3229      "options": {}
3230    },
3231    {
3232      "id": "x-ai/grok-4.1-fast:free",
3233      "name": "xAI: Grok 4.1 Fast (free)",
3234      "cost_per_1m_in": 0,
3235      "cost_per_1m_out": 0,
3236      "cost_per_1m_in_cached": 0,
3237      "cost_per_1m_out_cached": 0,
3238      "context_window": 2000000,
3239      "default_max_tokens": 15000,
3240      "can_reason": true,
3241      "reasoning_levels": [
3242        "low",
3243        "medium",
3244        "high"
3245      ],
3246      "default_reasoning_effort": "medium",
3247      "supports_attachments": true,
3248      "options": {}
3249    },
3250    {
3251      "id": "x-ai/grok-code-fast-1",
3252      "name": "xAI: Grok Code Fast 1",
3253      "cost_per_1m_in": 0.2,
3254      "cost_per_1m_out": 1.5,
3255      "cost_per_1m_in_cached": 0,
3256      "cost_per_1m_out_cached": 0.02,
3257      "context_window": 256000,
3258      "default_max_tokens": 5000,
3259      "can_reason": true,
3260      "reasoning_levels": [
3261        "low",
3262        "medium",
3263        "high"
3264      ],
3265      "default_reasoning_effort": "medium",
3266      "supports_attachments": false,
3267      "options": {}
3268    }
3269  ],
3270  "default_headers": {
3271    "HTTP-Referer": "https://charm.land",
3272    "X-Title": "Crush"
3273  }
3274}