openrouter.json
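
The "api_key" field holds an environment-variable reference ("$OPENROUTER_API_KEY") rather than a literal secret, so whatever loads this file is expected to expand that reference at startup. As a minimal sketch of one way a consumer might load the config (the loader below is hypothetical; only the field names and the env-var convention come from the file itself):

    import json
    import os


    def load_provider_config(path: str) -> dict:
        """Load a provider config file and expand $VAR references in api_key."""
        with open(path, encoding="utf-8") as f:
            config = json.load(f)
        # "$OPENROUTER_API_KEY" -> value of that environment variable, if set.
        config["api_key"] = os.path.expandvars(config["api_key"])
        return config


    def find_model(config: dict, model_id: str) -> dict | None:
        """Return the model entry whose id matches, e.g. the default_large_model_id."""
        return next((m for m in config["models"] if m["id"] == model_id), None)

With that in place, find_model(config, config["default_large_model_id"]) would resolve to the anthropic/claude-sonnet-4 entry listed below.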

{
  "name": "OpenRouter",
  "id": "openrouter",
  "api_key": "$OPENROUTER_API_KEY",
  "api_endpoint": "https://openrouter.ai/api/v1",
  "type": "openrouter",
  "default_large_model_id": "anthropic/claude-sonnet-4",
  "default_small_model_id": "anthropic/claude-3.5-haiku",
  "models": [
    {
      "id": "ai21/jamba-large-1.7",
      "name": "AI21: Jamba Large 1.7",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "ai21/jamba-mini-1.7",
      "name": "AI21: Jamba Mini 1.7",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "allenai/olmo-3-7b-instruct",
      "name": "AllenAI: Olmo 3 7B Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "amazon/nova-lite-v1",
      "name": "Amazon: Nova Lite 1.0",
      "cost_per_1m_in": 0.06,
      "cost_per_1m_out": 0.24,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-micro-v1",
      "name": "Amazon: Nova Micro 1.0",
      "cost_per_1m_in": 0.035,
      "cost_per_1m_out": 0.14,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "amazon/nova-premier-v1",
      "name": "Amazon: Nova Premier 1.0",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 12.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.625,
      "context_window": 1000000,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-pro-v1",
      "name": "Amazon: Nova Pro 1.0",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 3.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3-haiku",
      "name": "Anthropic: Claude 3 Haiku",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1.25,
      "cost_per_1m_in_cached": 0.3,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3-opus",
      "name": "Anthropic: Claude 3 Opus",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-haiku",
      "name": "Anthropic: Claude 3.5 Haiku",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-haiku-20241022",
      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-sonnet",
      "name": "Anthropic: Claude 3.5 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.7-sonnet",
      "name": "Anthropic: Claude 3.7 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.7-sonnet:thinking",
      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-haiku-4.5",
      "name": "Anthropic: Claude Haiku 4.5",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 1.25,
      "cost_per_1m_out_cached": 0.1,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4",
      "name": "Anthropic: Claude Opus 4",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4.1",
      "name": "Anthropic: Claude Opus 4.1",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-sonnet-4",
      "name": "Anthropic: Claude Sonnet 4",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-sonnet-4.5",
      "name": "Anthropic: Claude Sonnet 4.5",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "arcee-ai/virtuoso-large",
      "name": "Arcee AI: Virtuoso Large",
      "cost_per_1m_in": 0.75,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "baidu/ernie-4.5-21b-a3b",
      "name": "Baidu: ERNIE 4.5 21B A3B",
      "cost_per_1m_in": 0.07,
      "cost_per_1m_out": 0.28,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 120000,
      "default_max_tokens": 4000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "baidu/ernie-4.5-vl-28b-a3b",
      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
      "cost_per_1m_in": 0.14,
      "cost_per_1m_out": 0.56,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 30000,
      "default_max_tokens": 4000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
      "name": "Cogito V2 Preview Llama 109B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.59,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32767,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "cohere/command-r-08-2024",
      "name": "Cohere: Command R (08-2024)",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "cohere/command-r-plus-08-2024",
      "name": "Cohere: Command R+ (08-2024)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-405b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
      "cost_per_1m_in": 3.5,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-70b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
      "cost_per_1m_in": 0.88,
      "cost_per_1m_out": 0.88,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat",
      "name": "DeepSeek: DeepSeek V3",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 1.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 64000,
      "default_max_tokens": 8000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3-0324",
      "name": "DeepSeek: DeepSeek V3 0324",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1.12,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.135,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3-0324:free",
      "name": "DeepSeek: DeepSeek V3 0324 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3.1",
      "name": "DeepSeek: DeepSeek V3.1",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus",
      "name": "DeepSeek: DeepSeek V3.1 Terminus",
      "cost_per_1m_in": 0.23,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus:exacto",
      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.2-exp",
      "name": "DeepSeek: DeepSeek V3.2 Exp",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1",
      "name": "DeepSeek: R1",
      "cost_per_1m_in": 0.7,
      "cost_per_1m_out": 2.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1-0528",
      "name": "DeepSeek: R1 0528",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 1.75,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1-distill-llama-70b",
      "name": "DeepSeek: R1 Distill Llama 70B",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.13,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-001",
      "name": "Google: Gemini 2.0 Flash",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-exp:free",
      "name": "Google: Gemini 2.0 Flash Experimental (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-lite-001",
      "name": "Google: Gemini 2.0 Flash Lite",
      "cost_per_1m_in": 0.075,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash",
      "name": "Google: Gemini 2.5 Flash",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-lite",
      "name": "Google: Gemini 2.5 Flash Lite",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.01,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro",
      "name": "Google: Gemini 2.5 Pro",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro-preview-05-06",
      "name": "Google: Gemini 2.5 Pro Preview 05-06",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro-preview",
      "name": "Google: Gemini 2.5 Pro Preview 06-05",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-3-pro-preview",
      "name": "Google: Gemini 3 Pro Preview",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 12,
      "cost_per_1m_in_cached": 2.375,
      "cost_per_1m_out_cached": 0.2,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemma-3-27b-it",
      "name": "Google: Gemma 3 27B",
      "cost_per_1m_in": 0.13,
      "cost_per_1m_out": 0.52,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 96000,
      "default_max_tokens": 48000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "inception/mercury",
      "name": "Inception: Mercury",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "inception/mercury-coder",
      "name": "Inception: Mercury Coder",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "kwaipilot/kat-coder-pro:free",
      "name": "Kwaipilot: KAT-Coder-Pro V1 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meituan/longcat-flash-chat:free",
      "name": "Meituan: LongCat Flash Chat (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-70b-instruct",
      "name": "Meta: Llama 3.1 70B Instruct",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-8b-instruct",
      "name": "Meta: Llama 3.1 8B Instruct",
      "cost_per_1m_in": 0.02,
      "cost_per_1m_out": 0.03,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.2-3b-instruct",
      "name": "Meta: Llama 3.2 3B Instruct",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.05,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct",
      "name": "Meta: Llama 3.3 70B Instruct",
      "cost_per_1m_in": 0.13,
      "cost_per_1m_out": 0.38,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct:free",
      "name": "Meta: Llama 3.3 70B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-4-maverick",
      "name": "Meta: Llama 4 Maverick",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 0.85,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "meta-llama/llama-4-scout",
      "name": "Meta: Llama 4 Scout",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.7,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1310720,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "microsoft/phi-3-medium-128k-instruct",
      "name": "Microsoft: Phi-3 Medium 128K Instruct",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "microsoft/phi-3-mini-128k-instruct",
      "name": "Microsoft: Phi-3 Mini 128K Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "microsoft/phi-3.5-mini-128k-instruct",
      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "minimax/minimax-m2",
      "name": "MiniMax: MiniMax M2",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 204800,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large",
      "name": "Mistral Large",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2407",
      "name": "Mistral Large 2407",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2411",
      "name": "Mistral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small",
      "name": "Mistral Small",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-tiny",
      "name": "Mistral Tiny",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.25,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/codestral-2501",
      "name": "Mistral: Codestral 2501",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/codestral-2508",
      "name": "Mistral: Codestral 2508",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/devstral-medium",
      "name": "Mistral: Devstral Medium",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/devstral-small",
      "name": "Mistral: Devstral Small 1.1",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-medium-2506",
      "name": "Mistral: Magistral Medium 2506",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-medium-2506:thinking",
      "name": "Mistral: Magistral Medium 2506 (thinking)",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-small-2506",
      "name": "Mistral: Magistral Small 2506",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40000,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/ministral-3b",
      "name": "Mistral: Ministral 3B",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/ministral-8b",
      "name": "Mistral: Ministral 8B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-7b-instruct",
      "name": "Mistral: Mistral 7B Instruct",
      "cost_per_1m_in": 0.028,
      "cost_per_1m_out": 0.054,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-7b-instruct:free",
      "name": "Mistral: Mistral 7B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-medium-3",
      "name": "Mistral: Mistral Medium 3",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-medium-3.1",
      "name": "Mistral: Mistral Medium 3.1",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-nemo",
      "name": "Mistral: Mistral Nemo",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-24b-instruct-2501",
      "name": "Mistral: Mistral Small 3",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct",
      "name": "Mistral: Mistral Small 3.1 24B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
      "name": "Mistral: Mistral Small 3.1 24B (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 96000,
      "default_max_tokens": 48000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.2-24b-instruct",
      "name": "Mistral: Mistral Small 3.2 24B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.2-24b-instruct:free",
      "name": "Mistral: Mistral Small 3.2 24B (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mixtral-8x22b-instruct",
      "name": "Mistral: Mixtral 8x22B Instruct",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 6553,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mixtral-8x7b-instruct",
      "name": "Mistral: Mixtral 8x7B Instruct",
      "cost_per_1m_in": 0.54,
      "cost_per_1m_out": 0.54,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/pixtral-large-2411",
      "name": "Mistral: Pixtral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-saba",
      "name": "Mistral: Saba",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/voxtral-small-24b-2507",
      "name": "Mistral: Voxtral Small 24B 2507",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32000,
      "default_max_tokens": 3200,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2",
      "name": "MoonshotAI: Kimi K2 0711",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-0905",
      "name": "MoonshotAI: Kimi K2 0905",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 262144,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-0905:exacto",
      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-thinking",
      "name": "MoonshotAI: Kimi K2 Thinking",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 131072,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
      "cost_per_1m_in": 1.2,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2",
      "name": "NVIDIA: Nemotron Nano 9B V2",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.16,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2:free",
      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/deephermes-3-mistral-24b-preview",
      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.59,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/hermes-4-405b",
      "name": "Nous: Hermes 4 405B",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/codex-mini",
      "name": "OpenAI: Codex Mini",
      "cost_per_1m_in": 1.5,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.375,
      "context_window": 200000,
      "default_max_tokens": 50000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4-turbo",
      "name": "OpenAI: GPT-4 Turbo",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4-1106-preview",
      "name": "OpenAI: GPT-4 Turbo (older v1106)",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4-turbo-preview",
      "name": "OpenAI: GPT-4 Turbo Preview",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1",
      "name": "OpenAI: GPT-4.1",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 1047576,
      "default_max_tokens": 16384,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1-mini",
      "name": "OpenAI: GPT-4.1 Mini",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 1.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.1,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1-nano",
      "name": "OpenAI: GPT-4.1 Nano",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 1047576,
      "default_max_tokens": 16384,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o",
      "name": "OpenAI: GPT-4o",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-2024-05-13",
      "name": "OpenAI: GPT-4o (2024-05-13)",
      "cost_per_1m_in": 5,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-2024-08-06",
      "name": "OpenAI: GPT-4o (2024-08-06)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 1.25,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-2024-11-20",
      "name": "OpenAI: GPT-4o (2024-11-20)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 1.25,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o:extended",
      "name": "OpenAI: GPT-4o (extended)",
      "cost_per_1m_in": 6,
      "cost_per_1m_out": 18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-audio-preview",
      "name": "OpenAI: GPT-4o Audio",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-mini",
      "name": "OpenAI: GPT-4o-mini",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-mini-2024-07-18",
      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-5",
      "name": "OpenAI: GPT-5",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-5-codex",
      "name": "OpenAI: GPT-5 Codex",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-5-image",
      "name": "OpenAI: GPT-5 Image",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 1.25,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-5-image-mini",
      "name": "OpenAI: GPT-5 Image Mini",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.25,
      "context_window": 400000,
      "default_max_tokens": 64000,
1851      "can_reason": true,
1852      "reasoning_levels": [
1853        "low",
1854        "medium",
1855        "high"
1856      ],
1857      "default_reasoning_effort": "medium",
1858      "supports_attachments": true,
1859      "options": {}
1860    },
1861    {
1862      "id": "openai/gpt-5-mini",
1863      "name": "OpenAI: GPT-5 Mini",
1864      "cost_per_1m_in": 0.25,
1865      "cost_per_1m_out": 2,
1866      "cost_per_1m_in_cached": 0,
1867      "cost_per_1m_out_cached": 0.03,
1868      "context_window": 400000,
1869      "default_max_tokens": 40000,
1870      "can_reason": true,
1871      "reasoning_levels": [
1872        "low",
1873        "medium",
1874        "high"
1875      ],
1876      "default_reasoning_effort": "medium",
1877      "supports_attachments": true,
1878      "options": {}
1879    },
1880    {
1881      "id": "openai/gpt-5-nano",
1882      "name": "OpenAI: GPT-5 Nano",
1883      "cost_per_1m_in": 0.049999999999999996,
1884      "cost_per_1m_out": 0.39999999999999997,
1885      "cost_per_1m_in_cached": 0,
1886      "cost_per_1m_out_cached": 0.005,
1887      "context_window": 400000,
1888      "default_max_tokens": 64000,
1889      "can_reason": true,
1890      "reasoning_levels": [
1891        "low",
1892        "medium",
1893        "high"
1894      ],
1895      "default_reasoning_effort": "medium",
1896      "supports_attachments": true,
1897      "options": {}
1898    },
1899    {
1900      "id": "openai/gpt-5-pro",
1901      "name": "OpenAI: GPT-5 Pro",
1902      "cost_per_1m_in": 15,
1903      "cost_per_1m_out": 120,
1904      "cost_per_1m_in_cached": 0,
1905      "cost_per_1m_out_cached": 0,
1906      "context_window": 400000,
1907      "default_max_tokens": 64000,
1908      "can_reason": true,
1909      "reasoning_levels": [
1910        "low",
1911        "medium",
1912        "high"
1913      ],
1914      "default_reasoning_effort": "medium",
1915      "supports_attachments": true,
1916      "options": {}
1917    },
1918    {
1919      "id": "openai/gpt-5.1",
1920      "name": "OpenAI: GPT-5.1",
1921      "cost_per_1m_in": 1.25,
1922      "cost_per_1m_out": 10,
1923      "cost_per_1m_in_cached": 0,
1924      "cost_per_1m_out_cached": 0.125,
1925      "context_window": 400000,
1926      "default_max_tokens": 64000,
1927      "can_reason": true,
1928      "reasoning_levels": [
1929        "low",
1930        "medium",
1931        "high"
1932      ],
1933      "default_reasoning_effort": "medium",
1934      "supports_attachments": true,
1935      "options": {}
1936    },
1937    {
1938      "id": "openai/gpt-5.1-chat",
1939      "name": "OpenAI: GPT-5.1 Chat",
1940      "cost_per_1m_in": 1.25,
1941      "cost_per_1m_out": 10,
1942      "cost_per_1m_in_cached": 0,
1943      "cost_per_1m_out_cached": 0.125,
1944      "context_window": 128000,
1945      "default_max_tokens": 8192,
1946      "can_reason": false,
1947      "supports_attachments": true,
1948      "options": {}
1949    },
1950    {
1951      "id": "openai/gpt-5.1-codex",
1952      "name": "OpenAI: GPT-5.1-Codex",
1953      "cost_per_1m_in": 1.25,
1954      "cost_per_1m_out": 10,
1955      "cost_per_1m_in_cached": 0,
1956      "cost_per_1m_out_cached": 0.125,
1957      "context_window": 400000,
1958      "default_max_tokens": 64000,
1959      "can_reason": true,
1960      "reasoning_levels": [
1961        "low",
1962        "medium",
1963        "high"
1964      ],
1965      "default_reasoning_effort": "medium",
1966      "supports_attachments": true,
1967      "options": {}
1968    },
1969    {
1970      "id": "openai/gpt-5.1-codex-mini",
1971      "name": "OpenAI: GPT-5.1-Codex-Mini",
1972      "cost_per_1m_in": 0.25,
1973      "cost_per_1m_out": 2,
1974      "cost_per_1m_in_cached": 0,
1975      "cost_per_1m_out_cached": 0.024999999999999998,
1976      "context_window": 400000,
1977      "default_max_tokens": 50000,
1978      "can_reason": true,
1979      "reasoning_levels": [
1980        "low",
1981        "medium",
1982        "high"
1983      ],
1984      "default_reasoning_effort": "medium",
1985      "supports_attachments": true,
1986      "options": {}
1987    },
1988    {
1989      "id": "openai/gpt-oss-120b",
1990      "name": "OpenAI: gpt-oss-120b",
1991      "cost_per_1m_in": 0.04,
1992      "cost_per_1m_out": 0.39999999999999997,
1993      "cost_per_1m_in_cached": 0,
1994      "cost_per_1m_out_cached": 0,
1995      "context_window": 131072,
1996      "default_max_tokens": 65536,
1997      "can_reason": true,
1998      "reasoning_levels": [
1999        "low",
2000        "medium",
2001        "high"
2002      ],
2003      "default_reasoning_effort": "medium",
2004      "supports_attachments": false,
2005      "options": {}
2006    },
2007    {
2008      "id": "openai/gpt-oss-120b:exacto",
2009      "name": "OpenAI: gpt-oss-120b (exacto)",
2010      "cost_per_1m_in": 0.049999999999999996,
2011      "cost_per_1m_out": 0.24,
2012      "cost_per_1m_in_cached": 0,
2013      "cost_per_1m_out_cached": 0,
2014      "context_window": 131072,
2015      "default_max_tokens": 13107,
2016      "can_reason": true,
2017      "reasoning_levels": [
2018        "low",
2019        "medium",
2020        "high"
2021      ],
2022      "default_reasoning_effort": "medium",
2023      "supports_attachments": false,
2024      "options": {}
2025    },
2026    {
2027      "id": "openai/gpt-oss-20b",
2028      "name": "OpenAI: gpt-oss-20b",
2029      "cost_per_1m_in": 0.03,
2030      "cost_per_1m_out": 0.14,
2031      "cost_per_1m_in_cached": 0,
2032      "cost_per_1m_out_cached": 0,
2033      "context_window": 131072,
2034      "default_max_tokens": 13107,
2035      "can_reason": true,
2036      "reasoning_levels": [
2037        "low",
2038        "medium",
2039        "high"
2040      ],
2041      "default_reasoning_effort": "medium",
2042      "supports_attachments": false,
2043      "options": {}
2044    },
2045    {
2046      "id": "openai/gpt-oss-20b:free",
2047      "name": "OpenAI: gpt-oss-20b (free)",
2048      "cost_per_1m_in": 0,
2049      "cost_per_1m_out": 0,
2050      "cost_per_1m_in_cached": 0,
2051      "cost_per_1m_out_cached": 0,
2052      "context_window": 131072,
2053      "default_max_tokens": 65536,
2054      "can_reason": true,
2055      "reasoning_levels": [
2056        "low",
2057        "medium",
2058        "high"
2059      ],
2060      "default_reasoning_effort": "medium",
2061      "supports_attachments": false,
2062      "options": {}
2063    },
2064    {
2065      "id": "openai/gpt-oss-safeguard-20b",
2066      "name": "OpenAI: gpt-oss-safeguard-20b",
2067      "cost_per_1m_in": 0.075,
2068      "cost_per_1m_out": 0.3,
2069      "cost_per_1m_in_cached": 0,
2070      "cost_per_1m_out_cached": 0.037,
2071      "context_window": 131072,
2072      "default_max_tokens": 32768,
2073      "can_reason": true,
2074      "reasoning_levels": [
2075        "low",
2076        "medium",
2077        "high"
2078      ],
2079      "default_reasoning_effort": "medium",
2080      "supports_attachments": false,
2081      "options": {}
2082    },
2083    {
2084      "id": "openai/o1",
2085      "name": "OpenAI: o1",
2086      "cost_per_1m_in": 15,
2087      "cost_per_1m_out": 60,
2088      "cost_per_1m_in_cached": 0,
2089      "cost_per_1m_out_cached": 7.5,
2090      "context_window": 200000,
2091      "default_max_tokens": 50000,
2092      "can_reason": false,
2093      "supports_attachments": true,
2094      "options": {}
2095    },
2096    {
2097      "id": "openai/o3",
2098      "name": "OpenAI: o3",
2099      "cost_per_1m_in": 2,
2100      "cost_per_1m_out": 8,
2101      "cost_per_1m_in_cached": 0,
2102      "cost_per_1m_out_cached": 0.5,
2103      "context_window": 200000,
2104      "default_max_tokens": 50000,
2105      "can_reason": true,
2106      "reasoning_levels": [
2107        "low",
2108        "medium",
2109        "high"
2110      ],
2111      "default_reasoning_effort": "medium",
2112      "supports_attachments": true,
2113      "options": {}
2114    },
2115    {
2116      "id": "openai/o3-deep-research",
2117      "name": "OpenAI: o3 Deep Research",
2118      "cost_per_1m_in": 10,
2119      "cost_per_1m_out": 40,
2120      "cost_per_1m_in_cached": 0,
2121      "cost_per_1m_out_cached": 2.5,
2122      "context_window": 200000,
2123      "default_max_tokens": 50000,
2124      "can_reason": true,
2125      "reasoning_levels": [
2126        "low",
2127        "medium",
2128        "high"
2129      ],
2130      "default_reasoning_effort": "medium",
2131      "supports_attachments": true,
2132      "options": {}
2133    },
2134    {
2135      "id": "openai/o3-mini",
2136      "name": "OpenAI: o3 Mini",
2137      "cost_per_1m_in": 1.1,
2138      "cost_per_1m_out": 4.4,
2139      "cost_per_1m_in_cached": 0,
2140      "cost_per_1m_out_cached": 0.55,
2141      "context_window": 200000,
2142      "default_max_tokens": 50000,
2143      "can_reason": false,
2144      "supports_attachments": false,
2145      "options": {}
2146    },
2147    {
2148      "id": "openai/o3-mini-high",
2149      "name": "OpenAI: o3 Mini High",
2150      "cost_per_1m_in": 1.1,
2151      "cost_per_1m_out": 4.4,
2152      "cost_per_1m_in_cached": 0,
2153      "cost_per_1m_out_cached": 0.55,
2154      "context_window": 200000,
2155      "default_max_tokens": 50000,
2156      "can_reason": false,
2157      "supports_attachments": false,
2158      "options": {}
2159    },
2160    {
2161      "id": "openai/o3-pro",
2162      "name": "OpenAI: o3 Pro",
2163      "cost_per_1m_in": 20,
2164      "cost_per_1m_out": 80,
2165      "cost_per_1m_in_cached": 0,
2166      "cost_per_1m_out_cached": 0,
2167      "context_window": 200000,
2168      "default_max_tokens": 50000,
2169      "can_reason": true,
2170      "reasoning_levels": [
2171        "low",
2172        "medium",
2173        "high"
2174      ],
2175      "default_reasoning_effort": "medium",
2176      "supports_attachments": true,
2177      "options": {}
2178    },
2179    {
2180      "id": "openai/o4-mini",
2181      "name": "OpenAI: o4 Mini",
2182      "cost_per_1m_in": 1.1,
2183      "cost_per_1m_out": 4.4,
2184      "cost_per_1m_in_cached": 0,
2185      "cost_per_1m_out_cached": 0.275,
2186      "context_window": 200000,
2187      "default_max_tokens": 50000,
2188      "can_reason": true,
2189      "reasoning_levels": [
2190        "low",
2191        "medium",
2192        "high"
2193      ],
2194      "default_reasoning_effort": "medium",
2195      "supports_attachments": true,
2196      "options": {}
2197    },
2198    {
2199      "id": "openai/o4-mini-deep-research",
2200      "name": "OpenAI: o4 Mini Deep Research",
2201      "cost_per_1m_in": 2,
2202      "cost_per_1m_out": 8,
2203      "cost_per_1m_in_cached": 0,
2204      "cost_per_1m_out_cached": 0.5,
2205      "context_window": 200000,
2206      "default_max_tokens": 50000,
2207      "can_reason": true,
2208      "reasoning_levels": [
2209        "low",
2210        "medium",
2211        "high"
2212      ],
2213      "default_reasoning_effort": "medium",
2214      "supports_attachments": true,
2215      "options": {}
2216    },
2217    {
2218      "id": "openai/o4-mini-high",
2219      "name": "OpenAI: o4 Mini High",
2220      "cost_per_1m_in": 1.1,
2221      "cost_per_1m_out": 4.4,
2222      "cost_per_1m_in_cached": 0,
2223      "cost_per_1m_out_cached": 0.275,
2224      "context_window": 200000,
2225      "default_max_tokens": 50000,
2226      "can_reason": true,
2227      "reasoning_levels": [
2228        "low",
2229        "medium",
2230        "high"
2231      ],
2232      "default_reasoning_effort": "medium",
2233      "supports_attachments": true,
2234      "options": {}
2235    },
2236    {
2237      "id": "qwen/qwen-2.5-72b-instruct",
2238      "name": "Qwen2.5 72B Instruct",
2239      "cost_per_1m_in": 0.07,
2240      "cost_per_1m_out": 0.26,
2241      "cost_per_1m_in_cached": 0,
2242      "cost_per_1m_out_cached": 0,
2243      "context_window": 32768,
2244      "default_max_tokens": 16384,
2245      "can_reason": false,
2246      "supports_attachments": false,
2247      "options": {}
2248    },
2249    {
2250      "id": "qwen/qwen-plus-2025-07-28",
2251      "name": "Qwen: Qwen Plus 0728",
2252      "cost_per_1m_in": 0.39999999999999997,
2253      "cost_per_1m_out": 1.2,
2254      "cost_per_1m_in_cached": 0,
2255      "cost_per_1m_out_cached": 0,
2256      "context_window": 1000000,
2257      "default_max_tokens": 16384,
2258      "can_reason": false,
2259      "supports_attachments": false,
2260      "options": {}
2261    },
2262    {
2263      "id": "qwen/qwen-plus-2025-07-28:thinking",
2264      "name": "Qwen: Qwen Plus 0728 (thinking)",
2265      "cost_per_1m_in": 0.39999999999999997,
2266      "cost_per_1m_out": 4,
2267      "cost_per_1m_in_cached": 0,
2268      "cost_per_1m_out_cached": 0,
2269      "context_window": 1000000,
2270      "default_max_tokens": 16384,
2271      "can_reason": true,
2272      "reasoning_levels": [
2273        "low",
2274        "medium",
2275        "high"
2276      ],
2277      "default_reasoning_effort": "medium",
2278      "supports_attachments": false,
2279      "options": {}
2280    },
2281    {
2282      "id": "qwen/qwen-vl-max",
2283      "name": "Qwen: Qwen VL Max",
2284      "cost_per_1m_in": 0.7999999999999999,
2285      "cost_per_1m_out": 3.1999999999999997,
2286      "cost_per_1m_in_cached": 0,
2287      "cost_per_1m_out_cached": 0,
2288      "context_window": 131072,
2289      "default_max_tokens": 4096,
2290      "can_reason": false,
2291      "supports_attachments": true,
2292      "options": {}
2293    },
2294    {
2295      "id": "qwen/qwen-max",
2296      "name": "Qwen: Qwen-Max ",
2297      "cost_per_1m_in": 1.5999999999999999,
2298      "cost_per_1m_out": 6.3999999999999995,
2299      "cost_per_1m_in_cached": 0,
2300      "cost_per_1m_out_cached": 0.64,
2301      "context_window": 32768,
2302      "default_max_tokens": 4096,
2303      "can_reason": false,
2304      "supports_attachments": false,
2305      "options": {}
2306    },
2307    {
2308      "id": "qwen/qwen-plus",
2309      "name": "Qwen: Qwen-Plus",
2310      "cost_per_1m_in": 0.39999999999999997,
2311      "cost_per_1m_out": 1.2,
2312      "cost_per_1m_in_cached": 0,
2313      "cost_per_1m_out_cached": 0.16,
2314      "context_window": 131072,
2315      "default_max_tokens": 4096,
2316      "can_reason": false,
2317      "supports_attachments": false,
2318      "options": {}
2319    },
2320    {
2321      "id": "qwen/qwen-turbo",
2322      "name": "Qwen: Qwen-Turbo",
2323      "cost_per_1m_in": 0.049999999999999996,
2324      "cost_per_1m_out": 0.19999999999999998,
2325      "cost_per_1m_in_cached": 0,
2326      "cost_per_1m_out_cached": 0.02,
2327      "context_window": 1000000,
2328      "default_max_tokens": 4096,
2329      "can_reason": false,
2330      "supports_attachments": false,
2331      "options": {}
2332    },
2333    {
2334      "id": "qwen/qwen3-14b",
2335      "name": "Qwen: Qwen3 14B",
2336      "cost_per_1m_in": 0.049999999999999996,
2337      "cost_per_1m_out": 0.22,
2338      "cost_per_1m_in_cached": 0,
2339      "cost_per_1m_out_cached": 0,
2340      "context_window": 40960,
2341      "default_max_tokens": 20480,
2342      "can_reason": true,
2343      "reasoning_levels": [
2344        "low",
2345        "medium",
2346        "high"
2347      ],
2348      "default_reasoning_effort": "medium",
2349      "supports_attachments": false,
2350      "options": {}
2351    },
2352    {
2353      "id": "qwen/qwen3-235b-a22b",
2354      "name": "Qwen: Qwen3 235B A22B",
2355      "cost_per_1m_in": 0.22,
2356      "cost_per_1m_out": 0.88,
2357      "cost_per_1m_in_cached": 0,
2358      "cost_per_1m_out_cached": 0,
2359      "context_window": 131072,
2360      "default_max_tokens": 13107,
2361      "can_reason": true,
2362      "reasoning_levels": [
2363        "low",
2364        "medium",
2365        "high"
2366      ],
2367      "default_reasoning_effort": "medium",
2368      "supports_attachments": false,
2369      "options": {}
2370    },
2371    {
2372      "id": "qwen/qwen3-235b-a22b:free",
2373      "name": "Qwen: Qwen3 235B A22B (free)",
2374      "cost_per_1m_in": 0,
2375      "cost_per_1m_out": 0,
2376      "cost_per_1m_in_cached": 0,
2377      "cost_per_1m_out_cached": 0,
2378      "context_window": 131072,
2379      "default_max_tokens": 13107,
2380      "can_reason": true,
2381      "reasoning_levels": [
2382        "low",
2383        "medium",
2384        "high"
2385      ],
2386      "default_reasoning_effort": "medium",
2387      "supports_attachments": false,
2388      "options": {}
2389    },
2390    {
2391      "id": "qwen/qwen3-235b-a22b-2507",
2392      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2393      "cost_per_1m_in": 0.09999999999999999,
2394      "cost_per_1m_out": 0.09999999999999999,
2395      "cost_per_1m_in_cached": 0,
2396      "cost_per_1m_out_cached": 0,
2397      "context_window": 262144,
2398      "default_max_tokens": 131072,
2399      "can_reason": false,
2400      "supports_attachments": false,
2401      "options": {}
2402    },
2403    {
2404      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2405      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2406      "cost_per_1m_in": 0.65,
2407      "cost_per_1m_out": 3,
2408      "cost_per_1m_in_cached": 0,
2409      "cost_per_1m_out_cached": 0,
2410      "context_window": 262144,
2411      "default_max_tokens": 26214,
2412      "can_reason": true,
2413      "reasoning_levels": [
2414        "low",
2415        "medium",
2416        "high"
2417      ],
2418      "default_reasoning_effort": "medium",
2419      "supports_attachments": false,
2420      "options": {}
2421    },
2422    {
2423      "id": "qwen/qwen3-30b-a3b",
2424      "name": "Qwen: Qwen3 30B A3B",
2425      "cost_per_1m_in": 0.08,
2426      "cost_per_1m_out": 0.28,
2427      "cost_per_1m_in_cached": 0,
2428      "cost_per_1m_out_cached": 0,
2429      "context_window": 131072,
2430      "default_max_tokens": 65536,
2431      "can_reason": true,
2432      "reasoning_levels": [
2433        "low",
2434        "medium",
2435        "high"
2436      ],
2437      "default_reasoning_effort": "medium",
2438      "supports_attachments": false,
2439      "options": {}
2440    },
2441    {
2442      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2443      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2444      "cost_per_1m_in": 0.08,
2445      "cost_per_1m_out": 0.33,
2446      "cost_per_1m_in_cached": 0,
2447      "cost_per_1m_out_cached": 0,
2448      "context_window": 262144,
2449      "default_max_tokens": 131072,
2450      "can_reason": false,
2451      "supports_attachments": false,
2452      "options": {}
2453    },
2454    {
2455      "id": "qwen/qwen3-32b",
2456      "name": "Qwen: Qwen3 32B",
2457      "cost_per_1m_in": 0.15,
2458      "cost_per_1m_out": 0.5,
2459      "cost_per_1m_in_cached": 0,
2460      "cost_per_1m_out_cached": 0,
2461      "context_window": 131072,
2462      "default_max_tokens": 4000,
2463      "can_reason": true,
2464      "reasoning_levels": [
2465        "low",
2466        "medium",
2467        "high"
2468      ],
2469      "default_reasoning_effort": "medium",
2470      "supports_attachments": false,
2471      "options": {}
2472    },
2473    {
2474      "id": "qwen/qwen3-4b:free",
2475      "name": "Qwen: Qwen3 4B (free)",
2476      "cost_per_1m_in": 0,
2477      "cost_per_1m_out": 0,
2478      "cost_per_1m_in_cached": 0,
2479      "cost_per_1m_out_cached": 0,
2480      "context_window": 40960,
2481      "default_max_tokens": 4096,
2482      "can_reason": true,
2483      "reasoning_levels": [
2484        "low",
2485        "medium",
2486        "high"
2487      ],
2488      "default_reasoning_effort": "medium",
2489      "supports_attachments": false,
2490      "options": {}
2491    },
2492    {
2493      "id": "qwen/qwen3-8b",
2494      "name": "Qwen: Qwen3 8B",
2495      "cost_per_1m_in": 0.2,
2496      "cost_per_1m_out": 0.2,
2497      "cost_per_1m_in_cached": 0,
2498      "cost_per_1m_out_cached": 0,
2499      "context_window": 40960,
2500      "default_max_tokens": 4096,
2501      "can_reason": true,
2502      "reasoning_levels": [
2503        "low",
2504        "medium",
2505        "high"
2506      ],
2507      "default_reasoning_effort": "medium",
2508      "supports_attachments": false,
2509      "options": {}
2510    },
2511    {
2512      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2513      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2514      "cost_per_1m_in": 0.06,
2515      "cost_per_1m_out": 0.25,
2516      "cost_per_1m_in_cached": 0,
2517      "cost_per_1m_out_cached": 0,
2518      "context_window": 262144,
2519      "default_max_tokens": 131072,
2520      "can_reason": false,
2521      "supports_attachments": false,
2522      "options": {}
2523    },
2524    {
2525      "id": "qwen/qwen3-coder",
2526      "name": "Qwen: Qwen3 Coder 480B A35B",
2527      "cost_per_1m_in": 0.29,
2528      "cost_per_1m_out": 1.2,
2529      "cost_per_1m_in_cached": 0,
2530      "cost_per_1m_out_cached": 0,
2531      "context_window": 262144,
2532      "default_max_tokens": 32768,
2533      "can_reason": false,
2534      "supports_attachments": false,
2535      "options": {}
2536    },
2537    {
2538      "id": "qwen/qwen3-coder:exacto",
2539      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2540      "cost_per_1m_in": 0.38,
2541      "cost_per_1m_out": 1.53,
2542      "cost_per_1m_in_cached": 0,
2543      "cost_per_1m_out_cached": 0,
2544      "context_window": 262144,
2545      "default_max_tokens": 131072,
2546      "can_reason": true,
2547      "reasoning_levels": [
2548        "low",
2549        "medium",
2550        "high"
2551      ],
2552      "default_reasoning_effort": "medium",
2553      "supports_attachments": false,
2554      "options": {}
2555    },
2556    {
2557      "id": "qwen/qwen3-coder:free",
2558      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2559      "cost_per_1m_in": 0,
2560      "cost_per_1m_out": 0,
2561      "cost_per_1m_in_cached": 0,
2562      "cost_per_1m_out_cached": 0,
2563      "context_window": 262000,
2564      "default_max_tokens": 131000,
2565      "can_reason": false,
2566      "supports_attachments": false,
2567      "options": {}
2568    },
2569    {
2570      "id": "qwen/qwen3-coder-flash",
2571      "name": "Qwen: Qwen3 Coder Flash",
2572      "cost_per_1m_in": 0.3,
2573      "cost_per_1m_out": 1.5,
2574      "cost_per_1m_in_cached": 0,
2575      "cost_per_1m_out_cached": 0.08,
2576      "context_window": 128000,
2577      "default_max_tokens": 32768,
2578      "can_reason": false,
2579      "supports_attachments": false,
2580      "options": {}
2581    },
2582    {
2583      "id": "qwen/qwen3-coder-plus",
2584      "name": "Qwen: Qwen3 Coder Plus",
2585      "cost_per_1m_in": 1,
2586      "cost_per_1m_out": 5,
2587      "cost_per_1m_in_cached": 0,
2588      "cost_per_1m_out_cached": 0.09999999999999999,
2589      "context_window": 128000,
2590      "default_max_tokens": 32768,
2591      "can_reason": false,
2592      "supports_attachments": false,
2593      "options": {}
2594    },
2595    {
2596      "id": "qwen/qwen3-max",
2597      "name": "Qwen: Qwen3 Max",
2598      "cost_per_1m_in": 1.2,
2599      "cost_per_1m_out": 6,
2600      "cost_per_1m_in_cached": 0,
2601      "cost_per_1m_out_cached": 0.24,
2602      "context_window": 256000,
2603      "default_max_tokens": 16384,
2604      "can_reason": false,
2605      "supports_attachments": false,
2606      "options": {}
2607    },
2608    {
2609      "id": "qwen/qwen3-next-80b-a3b-instruct",
2610      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2611      "cost_per_1m_in": 0.14,
2612      "cost_per_1m_out": 1.1,
2613      "cost_per_1m_in_cached": 0,
2614      "cost_per_1m_out_cached": 0,
2615      "context_window": 262144,
2616      "default_max_tokens": 26214,
2617      "can_reason": false,
2618      "supports_attachments": false,
2619      "options": {}
2620    },
2621    {
2622      "id": "qwen/qwen3-next-80b-a3b-thinking",
2623      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2624      "cost_per_1m_in": 0.3,
2625      "cost_per_1m_out": 0.3,
2626      "cost_per_1m_in_cached": 0,
2627      "cost_per_1m_out_cached": 0,
2628      "context_window": 262144,
2629      "default_max_tokens": 131072,
2630      "can_reason": true,
2631      "reasoning_levels": [
2632        "low",
2633        "medium",
2634        "high"
2635      ],
2636      "default_reasoning_effort": "medium",
2637      "supports_attachments": false,
2638      "options": {}
2639    },
2640    {
2641      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2642      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2643      "cost_per_1m_in": 0.39999999999999997,
2644      "cost_per_1m_out": 1.5999999999999999,
2645      "cost_per_1m_in_cached": 0,
2646      "cost_per_1m_out_cached": 0,
2647      "context_window": 131072,
2648      "default_max_tokens": 16384,
2649      "can_reason": false,
2650      "supports_attachments": true,
2651      "options": {}
2652    },
2653    {
2654      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2655      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2656      "cost_per_1m_in": 0.3,
2657      "cost_per_1m_out": 1.2,
2658      "cost_per_1m_in_cached": 0,
2659      "cost_per_1m_out_cached": 0,
2660      "context_window": 262144,
2661      "default_max_tokens": 131072,
2662      "can_reason": true,
2663      "reasoning_levels": [
2664        "low",
2665        "medium",
2666        "high"
2667      ],
2668      "default_reasoning_effort": "medium",
2669      "supports_attachments": true,
2670      "options": {}
2671    },
2672    {
2673      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2674      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2675      "cost_per_1m_in": 0.15,
2676      "cost_per_1m_out": 0.6,
2677      "cost_per_1m_in_cached": 0,
2678      "cost_per_1m_out_cached": 0,
2679      "context_window": 262144,
2680      "default_max_tokens": 26214,
2681      "can_reason": false,
2682      "supports_attachments": true,
2683      "options": {}
2684    },
2685    {
2686      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2687      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2688      "cost_per_1m_in": 0.19999999999999998,
2689      "cost_per_1m_out": 1,
2690      "cost_per_1m_in_cached": 0,
2691      "cost_per_1m_out_cached": 0,
2692      "context_window": 131072,
2693      "default_max_tokens": 16384,
2694      "can_reason": true,
2695      "reasoning_levels": [
2696        "low",
2697        "medium",
2698        "high"
2699      ],
2700      "default_reasoning_effort": "medium",
2701      "supports_attachments": true,
2702      "options": {}
2703    },
2704    {
2705      "id": "qwen/qwen3-vl-8b-instruct",
2706      "name": "Qwen: Qwen3 VL 8B Instruct",
2707      "cost_per_1m_in": 0.18,
2708      "cost_per_1m_out": 0.7,
2709      "cost_per_1m_in_cached": 0,
2710      "cost_per_1m_out_cached": 0,
2711      "context_window": 256000,
2712      "default_max_tokens": 16384,
2713      "can_reason": false,
2714      "supports_attachments": true,
2715      "options": {}
2716    },
2717    {
2718      "id": "qwen/qwen3-vl-8b-thinking",
2719      "name": "Qwen: Qwen3 VL 8B Thinking",
2720      "cost_per_1m_in": 0.18,
2721      "cost_per_1m_out": 2.0999999999999996,
2722      "cost_per_1m_in_cached": 0,
2723      "cost_per_1m_out_cached": 0,
2724      "context_window": 256000,
2725      "default_max_tokens": 16384,
2726      "can_reason": true,
2727      "reasoning_levels": [
2728        "low",
2729        "medium",
2730        "high"
2731      ],
2732      "default_reasoning_effort": "medium",
2733      "supports_attachments": true,
2734      "options": {}
2735    },
2736    {
2737      "id": "stepfun-ai/step3",
2738      "name": "StepFun: Step3",
2739      "cost_per_1m_in": 0.5700000000000001,
2740      "cost_per_1m_out": 1.42,
2741      "cost_per_1m_in_cached": 0,
2742      "cost_per_1m_out_cached": 0,
2743      "context_window": 65536,
2744      "default_max_tokens": 32768,
2745      "can_reason": true,
2746      "reasoning_levels": [
2747        "low",
2748        "medium",
2749        "high"
2750      ],
2751      "default_reasoning_effort": "medium",
2752      "supports_attachments": true,
2753      "options": {}
2754    },
2755    {
2756      "id": "tngtech/deepseek-r1t2-chimera",
2757      "name": "TNG: DeepSeek R1T2 Chimera",
2758      "cost_per_1m_in": 0.3,
2759      "cost_per_1m_out": 1.2,
2760      "cost_per_1m_in_cached": 0,
2761      "cost_per_1m_out_cached": 0,
2762      "context_window": 163840,
2763      "default_max_tokens": 81920,
2764      "can_reason": true,
2765      "reasoning_levels": [
2766        "low",
2767        "medium",
2768        "high"
2769      ],
2770      "default_reasoning_effort": "medium",
2771      "supports_attachments": false,
2772      "options": {}
2773    },
2774    {
2775      "id": "thedrummer/rocinante-12b",
2776      "name": "TheDrummer: Rocinante 12B",
2777      "cost_per_1m_in": 0.16999999999999998,
2778      "cost_per_1m_out": 0.43,
2779      "cost_per_1m_in_cached": 0,
2780      "cost_per_1m_out_cached": 0,
2781      "context_window": 32768,
2782      "default_max_tokens": 3276,
2783      "can_reason": false,
2784      "supports_attachments": false,
2785      "options": {}
2786    },
2787    {
2788      "id": "thedrummer/unslopnemo-12b",
2789      "name": "TheDrummer: UnslopNemo 12B",
2790      "cost_per_1m_in": 0.39999999999999997,
2791      "cost_per_1m_out": 0.39999999999999997,
2792      "cost_per_1m_in_cached": 0,
2793      "cost_per_1m_out_cached": 0,
2794      "context_window": 32768,
2795      "default_max_tokens": 3276,
2796      "can_reason": false,
2797      "supports_attachments": false,
2798      "options": {}
2799    },
2800    {
2801      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2802      "name": "Tongyi DeepResearch 30B A3B",
2803      "cost_per_1m_in": 0.09,
2804      "cost_per_1m_out": 0.39999999999999997,
2805      "cost_per_1m_in_cached": 0,
2806      "cost_per_1m_out_cached": 0,
2807      "context_window": 131072,
2808      "default_max_tokens": 65536,
2809      "can_reason": true,
2810      "reasoning_levels": [
2811        "low",
2812        "medium",
2813        "high"
2814      ],
2815      "default_reasoning_effort": "medium",
2816      "supports_attachments": false,
2817      "options": {}
2818    },
2819    {
2820      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2821      "name": "Tongyi DeepResearch 30B A3B (free)",
2822      "cost_per_1m_in": 0,
2823      "cost_per_1m_out": 0,
2824      "cost_per_1m_in_cached": 0,
2825      "cost_per_1m_out_cached": 0,
2826      "context_window": 131072,
2827      "default_max_tokens": 65536,
2828      "can_reason": true,
2829      "reasoning_levels": [
2830        "low",
2831        "medium",
2832        "high"
2833      ],
2834      "default_reasoning_effort": "medium",
2835      "supports_attachments": false,
2836      "options": {}
2837    },
2838    {
2839      "id": "z-ai/glm-4-32b",
2840      "name": "Z.AI: GLM 4 32B ",
2841      "cost_per_1m_in": 0.09999999999999999,
2842      "cost_per_1m_out": 0.09999999999999999,
2843      "cost_per_1m_in_cached": 0,
2844      "cost_per_1m_out_cached": 0,
2845      "context_window": 128000,
2846      "default_max_tokens": 12800,
2847      "can_reason": false,
2848      "supports_attachments": false,
2849      "options": {}
2850    },
2851    {
2852      "id": "z-ai/glm-4.5",
2853      "name": "Z.AI: GLM 4.5",
2854      "cost_per_1m_in": 0.35,
2855      "cost_per_1m_out": 1.55,
2856      "cost_per_1m_in_cached": 0,
2857      "cost_per_1m_out_cached": 0,
2858      "context_window": 131072,
2859      "default_max_tokens": 65536,
2860      "can_reason": true,
2861      "reasoning_levels": [
2862        "low",
2863        "medium",
2864        "high"
2865      ],
2866      "default_reasoning_effort": "medium",
2867      "supports_attachments": false,
2868      "options": {}
2869    },
2870    {
2871      "id": "z-ai/glm-4.5-air",
2872      "name": "Z.AI: GLM 4.5 Air",
2873      "cost_per_1m_in": 0.19999999999999998,
2874      "cost_per_1m_out": 1.1,
2875      "cost_per_1m_in_cached": 0,
2876      "cost_per_1m_out_cached": 0.03,
2877      "context_window": 131072,
2878      "default_max_tokens": 48000,
2879      "can_reason": true,
2880      "reasoning_levels": [
2881        "low",
2882        "medium",
2883        "high"
2884      ],
2885      "default_reasoning_effort": "medium",
2886      "supports_attachments": false,
2887      "options": {}
2888    },
2889    {
2890      "id": "z-ai/glm-4.5-air:free",
2891      "name": "Z.AI: GLM 4.5 Air (free)",
2892      "cost_per_1m_in": 0,
2893      "cost_per_1m_out": 0,
2894      "cost_per_1m_in_cached": 0,
2895      "cost_per_1m_out_cached": 0,
2896      "context_window": 131072,
2897      "default_max_tokens": 48000,
2898      "can_reason": true,
2899      "reasoning_levels": [
2900        "low",
2901        "medium",
2902        "high"
2903      ],
2904      "default_reasoning_effort": "medium",
2905      "supports_attachments": false,
2906      "options": {}
2907    },
2908    {
2909      "id": "z-ai/glm-4.5v",
2910      "name": "Z.AI: GLM 4.5V",
2911      "cost_per_1m_in": 0.6,
2912      "cost_per_1m_out": 1.7999999999999998,
2913      "cost_per_1m_in_cached": 0,
2914      "cost_per_1m_out_cached": 0.11,
2915      "context_window": 65536,
2916      "default_max_tokens": 8192,
2917      "can_reason": true,
2918      "reasoning_levels": [
2919        "low",
2920        "medium",
2921        "high"
2922      ],
2923      "default_reasoning_effort": "medium",
2924      "supports_attachments": true,
2925      "options": {}
2926    },
2927    {
2928      "id": "z-ai/glm-4.6",
2929      "name": "Z.AI: GLM 4.6",
2930      "cost_per_1m_in": 0.6,
2931      "cost_per_1m_out": 2.2,
2932      "cost_per_1m_in_cached": 0,
2933      "cost_per_1m_out_cached": 0.11,
2934      "context_window": 204800,
2935      "default_max_tokens": 65536,
2936      "can_reason": true,
2937      "reasoning_levels": [
2938        "low",
2939        "medium",
2940        "high"
2941      ],
2942      "default_reasoning_effort": "medium",
2943      "supports_attachments": false,
2944      "options": {}
2945    },
2946    {
2947      "id": "z-ai/glm-4.6:exacto",
2948      "name": "Z.AI: GLM 4.6 (exacto)",
2949      "cost_per_1m_in": 0.6,
2950      "cost_per_1m_out": 2.2,
2951      "cost_per_1m_in_cached": 0,
2952      "cost_per_1m_out_cached": 0,
2953      "context_window": 204800,
2954      "default_max_tokens": 65536,
2955      "can_reason": true,
2956      "reasoning_levels": [
2957        "low",
2958        "medium",
2959        "high"
2960      ],
2961      "default_reasoning_effort": "medium",
2962      "supports_attachments": false,
2963      "options": {}
2964    },
2965    {
2966      "id": "x-ai/grok-3",
2967      "name": "xAI: Grok 3",
2968      "cost_per_1m_in": 5,
2969      "cost_per_1m_out": 25,
2970      "cost_per_1m_in_cached": 0,
2971      "cost_per_1m_out_cached": 1.25,
2972      "context_window": 131072,
2973      "default_max_tokens": 13107,
2974      "can_reason": false,
2975      "supports_attachments": false,
2976      "options": {}
2977    },
2978    {
2979      "id": "x-ai/grok-3-beta",
2980      "name": "xAI: Grok 3 Beta",
2981      "cost_per_1m_in": 5,
2982      "cost_per_1m_out": 25,
2983      "cost_per_1m_in_cached": 0,
2984      "cost_per_1m_out_cached": 1.25,
2985      "context_window": 131072,
2986      "default_max_tokens": 13107,
2987      "can_reason": false,
2988      "supports_attachments": false,
2989      "options": {}
2990    },
2991    {
2992      "id": "x-ai/grok-3-mini",
2993      "name": "xAI: Grok 3 Mini",
2994      "cost_per_1m_in": 0.3,
2995      "cost_per_1m_out": 0.5,
2996      "cost_per_1m_in_cached": 0,
2997      "cost_per_1m_out_cached": 0.075,
2998      "context_window": 131072,
2999      "default_max_tokens": 13107,
3000      "can_reason": true,
3001      "reasoning_levels": [
3002        "low",
3003        "medium",
3004        "high"
3005      ],
3006      "default_reasoning_effort": "medium",
3007      "supports_attachments": false,
3008      "options": {}
3009    },
3010    {
3011      "id": "x-ai/grok-3-mini-beta",
3012      "name": "xAI: Grok 3 Mini Beta",
3013      "cost_per_1m_in": 0.3,
3014      "cost_per_1m_out": 0.5,
3015      "cost_per_1m_in_cached": 0,
3016      "cost_per_1m_out_cached": 0.075,
3017      "context_window": 131072,
3018      "default_max_tokens": 13107,
3019      "can_reason": true,
3020      "reasoning_levels": [
3021        "low",
3022        "medium",
3023        "high"
3024      ],
3025      "default_reasoning_effort": "medium",
3026      "supports_attachments": false,
3027      "options": {}
3028    },
3029    {
3030      "id": "x-ai/grok-4",
3031      "name": "xAI: Grok 4",
3032      "cost_per_1m_in": 3,
3033      "cost_per_1m_out": 15,
3034      "cost_per_1m_in_cached": 0,
3035      "cost_per_1m_out_cached": 0.75,
3036      "context_window": 256000,
3037      "default_max_tokens": 25600,
3038      "can_reason": true,
3039      "reasoning_levels": [
3040        "low",
3041        "medium",
3042        "high"
3043      ],
3044      "default_reasoning_effort": "medium",
3045      "supports_attachments": true,
3046      "options": {}
3047    },
3048    {
3049      "id": "x-ai/grok-4-fast",
3050      "name": "xAI: Grok 4 Fast",
3051      "cost_per_1m_in": 0.19999999999999998,
3052      "cost_per_1m_out": 0.5,
3053      "cost_per_1m_in_cached": 0,
3054      "cost_per_1m_out_cached": 0.049999999999999996,
3055      "context_window": 2000000,
3056      "default_max_tokens": 15000,
3057      "can_reason": true,
3058      "reasoning_levels": [
3059        "low",
3060        "medium",
3061        "high"
3062      ],
3063      "default_reasoning_effort": "medium",
3064      "supports_attachments": true,
3065      "options": {}
3066    },
3067    {
3068      "id": "x-ai/grok-4.1-fast",
3069      "name": "xAI: Grok 4.1 Fast",
3070      "cost_per_1m_in": 0,
3071      "cost_per_1m_out": 0,
3072      "cost_per_1m_in_cached": 0,
3073      "cost_per_1m_out_cached": 0,
3074      "context_window": 2000000,
3075      "default_max_tokens": 15000,
3076      "can_reason": true,
3077      "reasoning_levels": [
3078        "low",
3079        "medium",
3080        "high"
3081      ],
3082      "default_reasoning_effort": "medium",
3083      "supports_attachments": true,
3084      "options": {}
3085    },
3086    {
3087      "id": "x-ai/grok-4.1-fast:free",
3088      "name": "xAI: Grok 4.1 Fast (free)",
3089      "cost_per_1m_in": 0,
3090      "cost_per_1m_out": 0,
3091      "cost_per_1m_in_cached": 0,
3092      "cost_per_1m_out_cached": 0,
3093      "context_window": 2000000,
3094      "default_max_tokens": 15000,
3095      "can_reason": true,
3096      "reasoning_levels": [
3097        "low",
3098        "medium",
3099        "high"
3100      ],
3101      "default_reasoning_effort": "medium",
3102      "supports_attachments": true,
3103      "options": {}
3104    },
3105    {
3106      "id": "x-ai/grok-code-fast-1",
3107      "name": "xAI: Grok Code Fast 1",
3108      "cost_per_1m_in": 0.19999999999999998,
3109      "cost_per_1m_out": 1.5,
3110      "cost_per_1m_in_cached": 0,
3111      "cost_per_1m_out_cached": 0.02,
3112      "context_window": 256000,
3113      "default_max_tokens": 5000,
3114      "can_reason": true,
3115      "reasoning_levels": [
3116        "low",
3117        "medium",
3118        "high"
3119      ],
3120      "default_reasoning_effort": "medium",
3121      "supports_attachments": false,
3122      "options": {}
3123    }
3124  ],
3125  "default_headers": {
3126    "HTTP-Referer": "https://charm.land",
3127    "X-Title": "Crush"
3128  }
3129}