openrouter.json

   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false,
  21      "options": {}
  22    },
  23    {
  24      "id": "ai21/jamba-mini-1.7",
  25      "name": "AI21: Jamba Mini 1.7",
  26      "cost_per_1m_in": 0.2,
  27      "cost_per_1m_out": 0.4,
  28      "cost_per_1m_in_cached": 0,
  29      "cost_per_1m_out_cached": 0,
  30      "context_window": 256000,
  31      "default_max_tokens": 2048,
  32      "can_reason": false,
  33      "supports_attachments": false,
  34      "options": {}
  35    },
  36    {
  37      "id": "amazon/nova-lite-v1",
  38      "name": "Amazon: Nova Lite 1.0",
  39      "cost_per_1m_in": 0.06,
  40      "cost_per_1m_out": 0.24,
  41      "cost_per_1m_in_cached": 0,
  42      "cost_per_1m_out_cached": 0,
  43      "context_window": 300000,
  44      "default_max_tokens": 2560,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "amazon/nova-micro-v1",
  51      "name": "Amazon: Nova Micro 1.0",
  52      "cost_per_1m_in": 0.035,
  53      "cost_per_1m_out": 0.14,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 128000,
  57      "default_max_tokens": 2560,
  58      "can_reason": false,
  59      "supports_attachments": false,
  60      "options": {}
  61    },
  62    {
  63      "id": "amazon/nova-premier-v1",
  64      "name": "Amazon: Nova Premier 1.0",
  65      "cost_per_1m_in": 2.5,
  66      "cost_per_1m_out": 12.5,
  67      "cost_per_1m_in_cached": 0,
  68      "cost_per_1m_out_cached": 0.625,
  69      "context_window": 1000000,
  70      "default_max_tokens": 16000,
  71      "can_reason": false,
  72      "supports_attachments": true,
  73      "options": {}
  74    },
  75    {
  76      "id": "amazon/nova-pro-v1",
  77      "name": "Amazon: Nova Pro 1.0",
  78      "cost_per_1m_in": 0.8,
  79      "cost_per_1m_out": 3.2,
  80      "cost_per_1m_in_cached": 0,
  81      "cost_per_1m_out_cached": 0,
  82      "context_window": 300000,
  83      "default_max_tokens": 2560,
  84      "can_reason": false,
  85      "supports_attachments": true,
  86      "options": {}
  87    },
  88    {
  89      "id": "anthropic/claude-3-haiku",
  90      "name": "Anthropic: Claude 3 Haiku",
  91      "cost_per_1m_in": 0.25,
  92      "cost_per_1m_out": 1.25,
  93      "cost_per_1m_in_cached": 0.3,
  94      "cost_per_1m_out_cached": 0.03,
  95      "context_window": 200000,
  96      "default_max_tokens": 2048,
  97      "can_reason": false,
  98      "supports_attachments": true,
  99      "options": {}
 100    },
 101    {
 102      "id": "anthropic/claude-3-opus",
 103      "name": "Anthropic: Claude 3 Opus",
 104      "cost_per_1m_in": 15,
 105      "cost_per_1m_out": 75,
 106      "cost_per_1m_in_cached": 18.75,
 107      "cost_per_1m_out_cached": 1.5,
 108      "context_window": 200000,
 109      "default_max_tokens": 2048,
 110      "can_reason": false,
 111      "supports_attachments": true,
 112      "options": {}
 113    },
 114    {
 115      "id": "anthropic/claude-3.5-haiku",
 116      "name": "Anthropic: Claude 3.5 Haiku",
 117      "cost_per_1m_in": 0.8,
 118      "cost_per_1m_out": 4,
 119      "cost_per_1m_in_cached": 1,
 120      "cost_per_1m_out_cached": 0.08,
 121      "context_window": 200000,
 122      "default_max_tokens": 4096,
 123      "can_reason": false,
 124      "supports_attachments": true,
 125      "options": {}
 126    },
 127    {
 128      "id": "anthropic/claude-3.5-haiku-20241022",
 129      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
 130      "cost_per_1m_in": 0.8,
 131      "cost_per_1m_out": 4,
 132      "cost_per_1m_in_cached": 1,
 133      "cost_per_1m_out_cached": 0.08,
 134      "context_window": 200000,
 135      "default_max_tokens": 4096,
 136      "can_reason": false,
 137      "supports_attachments": true,
 138      "options": {}
 139    },
 140    {
 141      "id": "anthropic/claude-3.5-sonnet",
 142      "name": "Anthropic: Claude 3.5 Sonnet",
 143      "cost_per_1m_in": 3,
 144      "cost_per_1m_out": 15,
 145      "cost_per_1m_in_cached": 0,
 146      "cost_per_1m_out_cached": 0,
 147      "context_window": 200000,
 148      "default_max_tokens": 4096,
 149      "can_reason": false,
 150      "supports_attachments": true,
 151      "options": {}
 152    },
 153    {
 154      "id": "anthropic/claude-3.5-sonnet-20240620",
 155      "name": "Anthropic: Claude 3.5 Sonnet (2024-06-20)",
 156      "cost_per_1m_in": 3,
 157      "cost_per_1m_out": 15,
 158      "cost_per_1m_in_cached": 3.75,
 159      "cost_per_1m_out_cached": 0.3,
 160      "context_window": 200000,
 161      "default_max_tokens": 4096,
 162      "can_reason": false,
 163      "supports_attachments": true,
 164      "options": {}
 165    },
 166    {
 167      "id": "anthropic/claude-3.7-sonnet",
 168      "name": "Anthropic: Claude 3.7 Sonnet",
 169      "cost_per_1m_in": 3,
 170      "cost_per_1m_out": 15,
 171      "cost_per_1m_in_cached": 3.75,
 172      "cost_per_1m_out_cached": 0.3,
 173      "context_window": 200000,
 174      "default_max_tokens": 32000,
 175      "can_reason": true,
 176      "reasoning_levels": [
 177        "low",
 178        "medium",
 179        "high"
 180      ],
 181      "default_reasoning_effort": "medium",
 182      "supports_attachments": true,
 183      "options": {}
 184    },
 185    {
 186      "id": "anthropic/claude-3.7-sonnet:thinking",
 187      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 188      "cost_per_1m_in": 3,
 189      "cost_per_1m_out": 15,
 190      "cost_per_1m_in_cached": 3.75,
 191      "cost_per_1m_out_cached": 0.3,
 192      "context_window": 200000,
 193      "default_max_tokens": 32000,
 194      "can_reason": true,
 195      "reasoning_levels": [
 196        "low",
 197        "medium",
 198        "high"
 199      ],
 200      "default_reasoning_effort": "medium",
 201      "supports_attachments": true,
 202      "options": {}
 203    },
 204    {
 205      "id": "anthropic/claude-haiku-4.5",
 206      "name": "Anthropic: Claude Haiku 4.5",
 207      "cost_per_1m_in": 1,
 208      "cost_per_1m_out": 5,
 209      "cost_per_1m_in_cached": 1.25,
 210      "cost_per_1m_out_cached": 0.1,
 211      "context_window": 200000,
 212      "default_max_tokens": 32000,
 213      "can_reason": true,
 214      "reasoning_levels": [
 215        "low",
 216        "medium",
 217        "high"
 218      ],
 219      "default_reasoning_effort": "medium",
 220      "supports_attachments": true,
 221      "options": {}
 222    },
 223    {
 224      "id": "anthropic/claude-opus-4",
 225      "name": "Anthropic: Claude Opus 4",
 226      "cost_per_1m_in": 15,
 227      "cost_per_1m_out": 75,
 228      "cost_per_1m_in_cached": 18.75,
 229      "cost_per_1m_out_cached": 1.5,
 230      "context_window": 200000,
 231      "default_max_tokens": 16000,
 232      "can_reason": true,
 233      "reasoning_levels": [
 234        "low",
 235        "medium",
 236        "high"
 237      ],
 238      "default_reasoning_effort": "medium",
 239      "supports_attachments": true,
 240      "options": {}
 241    },
 242    {
 243      "id": "anthropic/claude-opus-4.1",
 244      "name": "Anthropic: Claude Opus 4.1",
 245      "cost_per_1m_in": 15,
 246      "cost_per_1m_out": 75,
 247      "cost_per_1m_in_cached": 18.75,
 248      "cost_per_1m_out_cached": 1.5,
 249      "context_window": 200000,
 250      "default_max_tokens": 16000,
 251      "can_reason": true,
 252      "reasoning_levels": [
 253        "low",
 254        "medium",
 255        "high"
 256      ],
 257      "default_reasoning_effort": "medium",
 258      "supports_attachments": true,
 259      "options": {}
 260    },
 261    {
 262      "id": "anthropic/claude-sonnet-4",
 263      "name": "Anthropic: Claude Sonnet 4",
 264      "cost_per_1m_in": 3,
 265      "cost_per_1m_out": 15,
 266      "cost_per_1m_in_cached": 3.75,
 267      "cost_per_1m_out_cached": 0.3,
 268      "context_window": 1000000,
 269      "default_max_tokens": 32000,
 270      "can_reason": true,
 271      "reasoning_levels": [
 272        "low",
 273        "medium",
 274        "high"
 275      ],
 276      "default_reasoning_effort": "medium",
 277      "supports_attachments": true,
 278      "options": {}
 279    },
 280    {
 281      "id": "anthropic/claude-sonnet-4.5",
 282      "name": "Anthropic: Claude Sonnet 4.5",
 283      "cost_per_1m_in": 3,
 284      "cost_per_1m_out": 15,
 285      "cost_per_1m_in_cached": 3.75,
 286      "cost_per_1m_out_cached": 0.3,
 287      "context_window": 1000000,
 288      "default_max_tokens": 32000,
 289      "can_reason": true,
 290      "reasoning_levels": [
 291        "low",
 292        "medium",
 293        "high"
 294      ],
 295      "default_reasoning_effort": "medium",
 296      "supports_attachments": true,
 297      "options": {}
 298    },
 299    {
 300      "id": "arcee-ai/virtuoso-large",
 301      "name": "Arcee AI: Virtuoso Large",
 302      "cost_per_1m_in": 0.75,
 303      "cost_per_1m_out": 1.2,
 304      "cost_per_1m_in_cached": 0,
 305      "cost_per_1m_out_cached": 0,
 306      "context_window": 131072,
 307      "default_max_tokens": 32000,
 308      "can_reason": false,
 309      "supports_attachments": false,
 310      "options": {}
 311    },
 312    {
 313      "id": "baidu/ernie-4.5-21b-a3b",
 314      "name": "Baidu: ERNIE 4.5 21B A3B",
 315      "cost_per_1m_in": 0.07,
 316      "cost_per_1m_out": 0.28,
 317      "cost_per_1m_in_cached": 0,
 318      "cost_per_1m_out_cached": 0,
 319      "context_window": 120000,
 320      "default_max_tokens": 4000,
 321      "can_reason": false,
 322      "supports_attachments": false,
 323      "options": {}
 324    },
 325    {
 326      "id": "baidu/ernie-4.5-vl-28b-a3b",
 327      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 328      "cost_per_1m_in": 0.14,
 329      "cost_per_1m_out": 0.56,
 330      "cost_per_1m_in_cached": 0,
 331      "cost_per_1m_out_cached": 0,
 332      "context_window": 30000,
 333      "default_max_tokens": 4000,
 334      "can_reason": true,
 335      "reasoning_levels": [
 336        "low",
 337        "medium",
 338        "high"
 339      ],
 340      "default_reasoning_effort": "medium",
 341      "supports_attachments": true,
 342      "options": {}
 343    },
 344    {
 345      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
 346      "name": "Cogito V2 Preview Llama 109B",
 347      "cost_per_1m_in": 0.18,
 348      "cost_per_1m_out": 0.59,
 349      "cost_per_1m_in_cached": 0,
 350      "cost_per_1m_out_cached": 0,
 351      "context_window": 32767,
 352      "default_max_tokens": 3276,
 353      "can_reason": true,
 354      "reasoning_levels": [
 355        "low",
 356        "medium",
 357        "high"
 358      ],
 359      "default_reasoning_effort": "medium",
 360      "supports_attachments": true,
 361      "options": {}
 362    },
 363    {
 364      "id": "cohere/command-r-08-2024",
 365      "name": "Cohere: Command R (08-2024)",
 366      "cost_per_1m_in": 0.15,
 367      "cost_per_1m_out": 0.6,
 368      "cost_per_1m_in_cached": 0,
 369      "cost_per_1m_out_cached": 0,
 370      "context_window": 128000,
 371      "default_max_tokens": 2000,
 372      "can_reason": false,
 373      "supports_attachments": false,
 374      "options": {}
 375    },
 376    {
 377      "id": "cohere/command-r-plus-08-2024",
 378      "name": "Cohere: Command R+ (08-2024)",
 379      "cost_per_1m_in": 2.5,
 380      "cost_per_1m_out": 10,
 381      "cost_per_1m_in_cached": 0,
 382      "cost_per_1m_out_cached": 0,
 383      "context_window": 128000,
 384      "default_max_tokens": 2000,
 385      "can_reason": false,
 386      "supports_attachments": false,
 387      "options": {}
 388    },
 389    {
 390      "id": "deepcogito/cogito-v2-preview-llama-405b",
 391      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
 392      "cost_per_1m_in": 3.5,
 393      "cost_per_1m_out": 3.5,
 394      "cost_per_1m_in_cached": 0,
 395      "cost_per_1m_out_cached": 0,
 396      "context_window": 32768,
 397      "default_max_tokens": 3276,
 398      "can_reason": true,
 399      "reasoning_levels": [
 400        "low",
 401        "medium",
 402        "high"
 403      ],
 404      "default_reasoning_effort": "medium",
 405      "supports_attachments": false,
 406      "options": {}
 407    },
 408    {
 409      "id": "deepcogito/cogito-v2-preview-llama-70b",
 410      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
 411      "cost_per_1m_in": 0.88,
 412      "cost_per_1m_out": 0.88,
 413      "cost_per_1m_in_cached": 0,
 414      "cost_per_1m_out_cached": 0,
 415      "context_window": 32768,
 416      "default_max_tokens": 3276,
 417      "can_reason": true,
 418      "reasoning_levels": [
 419        "low",
 420        "medium",
 421        "high"
 422      ],
 423      "default_reasoning_effort": "medium",
 424      "supports_attachments": false,
 425      "options": {}
 426    },
 427    {
 428      "id": "deepseek/deepseek-chat",
 429      "name": "DeepSeek: DeepSeek V3",
 430      "cost_per_1m_in": 0.4,
 431      "cost_per_1m_out": 1.3,
 432      "cost_per_1m_in_cached": 0,
 433      "cost_per_1m_out_cached": 0,
 434      "context_window": 64000,
 435      "default_max_tokens": 8000,
 436      "can_reason": false,
 437      "supports_attachments": false,
 438      "options": {}
 439    },
 440    {
 441      "id": "deepseek/deepseek-chat-v3-0324",
 442      "name": "DeepSeek: DeepSeek V3 0324",
 443      "cost_per_1m_in": 0.9,
 444      "cost_per_1m_out": 0.9,
 445      "cost_per_1m_in_cached": 0,
 446      "cost_per_1m_out_cached": 0,
 447      "context_window": 163840,
 448      "default_max_tokens": 10240,
 449      "can_reason": false,
 450      "supports_attachments": false,
 451      "options": {}
 452    },
 453    {
 454      "id": "deepseek/deepseek-chat-v3-0324:free",
 455      "name": "DeepSeek: DeepSeek V3 0324 (free)",
 456      "cost_per_1m_in": 0,
 457      "cost_per_1m_out": 0,
 458      "cost_per_1m_in_cached": 0,
 459      "cost_per_1m_out_cached": 0,
 460      "context_window": 163840,
 461      "default_max_tokens": 16384,
 462      "can_reason": false,
 463      "supports_attachments": false,
 464      "options": {}
 465    },
 466    {
 467      "id": "deepseek/deepseek-chat-v3.1",
 468      "name": "DeepSeek: DeepSeek V3.1",
 469      "cost_per_1m_in": 0.27,
 470      "cost_per_1m_out": 1,
 471      "cost_per_1m_in_cached": 0,
 472      "cost_per_1m_out_cached": 0,
 473      "context_window": 163840,
 474      "default_max_tokens": 16384,
 475      "can_reason": true,
 476      "reasoning_levels": [
 477        "low",
 478        "medium",
 479        "high"
 480      ],
 481      "default_reasoning_effort": "medium",
 482      "supports_attachments": false,
 483      "options": {}
 484    },
 485    {
 486      "id": "deepseek/deepseek-v3.1-terminus",
 487      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 488      "cost_per_1m_in": 0.27,
 489      "cost_per_1m_out": 1,
 490      "cost_per_1m_in_cached": 0,
 491      "cost_per_1m_out_cached": 0,
 492      "context_window": 163840,
 493      "default_max_tokens": 16384,
 494      "can_reason": true,
 495      "reasoning_levels": [
 496        "low",
 497        "medium",
 498        "high"
 499      ],
 500      "default_reasoning_effort": "medium",
 501      "supports_attachments": false,
 502      "options": {}
 503    },
 504    {
 505      "id": "deepseek/deepseek-v3.1-terminus:exacto",
 506      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
 507      "cost_per_1m_in": 0.27,
 508      "cost_per_1m_out": 1,
 509      "cost_per_1m_in_cached": 0,
 510      "cost_per_1m_out_cached": 0,
 511      "context_window": 131072,
 512      "default_max_tokens": 32768,
 513      "can_reason": true,
 514      "reasoning_levels": [
 515        "low",
 516        "medium",
 517        "high"
 518      ],
 519      "default_reasoning_effort": "medium",
 520      "supports_attachments": false,
 521      "options": {}
 522    },
 523    {
 524      "id": "deepseek/deepseek-v3.2-exp",
 525      "name": "DeepSeek: DeepSeek V3.2 Exp",
 526      "cost_per_1m_in": 0.27,
 527      "cost_per_1m_out": 0.41,
 528      "cost_per_1m_in_cached": 0,
 529      "cost_per_1m_out_cached": 0,
 530      "context_window": 163840,
 531      "default_max_tokens": 81920,
 532      "can_reason": true,
 533      "reasoning_levels": [
 534        "low",
 535        "medium",
 536        "high"
 537      ],
 538      "default_reasoning_effort": "medium",
 539      "supports_attachments": false,
 540      "options": {}
 541    },
 542    {
 543      "id": "deepseek/deepseek-r1",
 544      "name": "DeepSeek: R1",
 545      "cost_per_1m_in": 0.7,
 546      "cost_per_1m_out": 2.4,
 547      "cost_per_1m_in_cached": 0,
 548      "cost_per_1m_out_cached": 0,
 549      "context_window": 163840,
 550      "default_max_tokens": 81920,
 551      "can_reason": true,
 552      "reasoning_levels": [
 553        "low",
 554        "medium",
 555        "high"
 556      ],
 557      "default_reasoning_effort": "medium",
 558      "supports_attachments": false,
 559      "options": {}
 560    },
 561    {
 562      "id": "deepseek/deepseek-r1-0528",
 563      "name": "DeepSeek: R1 0528",
 564      "cost_per_1m_in": 0.8,
 565      "cost_per_1m_out": 2.4,
 566      "cost_per_1m_in_cached": 0,
 567      "cost_per_1m_out_cached": 0,
 568      "context_window": 163840,
 569      "default_max_tokens": 16384,
 570      "can_reason": true,
 571      "reasoning_levels": [
 572        "low",
 573        "medium",
 574        "high"
 575      ],
 576      "default_reasoning_effort": "medium",
 577      "supports_attachments": false,
 578      "options": {}
 579    },
 580    {
 581      "id": "deepseek/deepseek-r1-distill-llama-70b",
 582      "name": "DeepSeek: R1 Distill Llama 70B",
 583      "cost_per_1m_in": 0.03,
 584      "cost_per_1m_out": 0.13,
 585      "cost_per_1m_in_cached": 0,
 586      "cost_per_1m_out_cached": 0,
 587      "context_window": 131072,
 588      "default_max_tokens": 65536,
 589      "can_reason": true,
 590      "reasoning_levels": [
 591        "low",
 592        "medium",
 593        "high"
 594      ],
 595      "default_reasoning_effort": "medium",
 596      "supports_attachments": false,
 597      "options": {}
 598    },
 599    {
 600      "id": "google/gemini-2.0-flash-001",
 601      "name": "Google: Gemini 2.0 Flash",
 602      "cost_per_1m_in": 0.1,
 603      "cost_per_1m_out": 0.4,
 604      "cost_per_1m_in_cached": 0.1833,
 605      "cost_per_1m_out_cached": 0.025,
 606      "context_window": 1048576,
 607      "default_max_tokens": 4096,
 608      "can_reason": false,
 609      "supports_attachments": true,
 610      "options": {}
 611    },
 612    {
 613      "id": "google/gemini-2.0-flash-exp:free",
 614      "name": "Google: Gemini 2.0 Flash Experimental (free)",
 615      "cost_per_1m_in": 0,
 616      "cost_per_1m_out": 0,
 617      "cost_per_1m_in_cached": 0,
 618      "cost_per_1m_out_cached": 0,
 619      "context_window": 1048576,
 620      "default_max_tokens": 4096,
 621      "can_reason": false,
 622      "supports_attachments": true,
 623      "options": {}
 624    },
 625    {
 626      "id": "google/gemini-2.0-flash-lite-001",
 627      "name": "Google: Gemini 2.0 Flash Lite",
 628      "cost_per_1m_in": 0.075,
 629      "cost_per_1m_out": 0.3,
 630      "cost_per_1m_in_cached": 0,
 631      "cost_per_1m_out_cached": 0,
 632      "context_window": 1048576,
 633      "default_max_tokens": 4096,
 634      "can_reason": false,
 635      "supports_attachments": true,
 636      "options": {}
 637    },
 638    {
 639      "id": "google/gemini-2.5-flash",
 640      "name": "Google: Gemini 2.5 Flash",
 641      "cost_per_1m_in": 0.3,
 642      "cost_per_1m_out": 2.5,
 643      "cost_per_1m_in_cached": 0.3833,
 644      "cost_per_1m_out_cached": 0.03,
 645      "context_window": 1048576,
 646      "default_max_tokens": 32767,
 647      "can_reason": true,
 648      "reasoning_levels": [
 649        "low",
 650        "medium",
 651        "high"
 652      ],
 653      "default_reasoning_effort": "medium",
 654      "supports_attachments": true,
 655      "options": {}
 656    },
 657    {
 658      "id": "google/gemini-2.5-flash-lite",
 659      "name": "Google: Gemini 2.5 Flash Lite",
 660      "cost_per_1m_in": 0.1,
 661      "cost_per_1m_out": 0.4,
 662      "cost_per_1m_in_cached": 0.1833,
 663      "cost_per_1m_out_cached": 0.025,
 664      "context_window": 1048576,
 665      "default_max_tokens": 32767,
 666      "can_reason": true,
 667      "reasoning_levels": [
 668        "low",
 669        "medium",
 670        "high"
 671      ],
 672      "default_reasoning_effort": "medium",
 673      "supports_attachments": true,
 674      "options": {}
 675    },
 676    {
 677      "id": "google/gemini-2.5-flash-lite-preview-06-17",
 678      "name": "Google: Gemini 2.5 Flash Lite Preview 06-17",
 679      "cost_per_1m_in": 0.1,
 680      "cost_per_1m_out": 0.4,
 681      "cost_per_1m_in_cached": 0.1833,
 682      "cost_per_1m_out_cached": 0.025,
 683      "context_window": 1048576,
 684      "default_max_tokens": 32767,
 685      "can_reason": true,
 686      "reasoning_levels": [
 687        "low",
 688        "medium",
 689        "high"
 690      ],
 691      "default_reasoning_effort": "medium",
 692      "supports_attachments": true,
 693      "options": {}
 694    },
 695    {
 696      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 697      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 698      "cost_per_1m_in": 0.1,
 699      "cost_per_1m_out": 0.4,
 700      "cost_per_1m_in_cached": 0,
 701      "cost_per_1m_out_cached": 0,
 702      "context_window": 1048576,
 703      "default_max_tokens": 32768,
 704      "can_reason": true,
 705      "reasoning_levels": [
 706        "low",
 707        "medium",
 708        "high"
 709      ],
 710      "default_reasoning_effort": "medium",
 711      "supports_attachments": true,
 712      "options": {}
 713    },
 714    {
 715      "id": "google/gemini-2.5-flash-preview-09-2025",
 716      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
 717      "cost_per_1m_in": 0.3,
 718      "cost_per_1m_out": 2.5,
 719      "cost_per_1m_in_cached": 0.3833,
 720      "cost_per_1m_out_cached": 0.075,
 721      "context_window": 1048576,
 722      "default_max_tokens": 32767,
 723      "can_reason": true,
 724      "reasoning_levels": [
 725        "low",
 726        "medium",
 727        "high"
 728      ],
 729      "default_reasoning_effort": "medium",
 730      "supports_attachments": true,
 731      "options": {}
 732    },
 733    {
 734      "id": "google/gemini-2.5-pro",
 735      "name": "Google: Gemini 2.5 Pro",
 736      "cost_per_1m_in": 1.25,
 737      "cost_per_1m_out": 10,
 738      "cost_per_1m_in_cached": 1.625,
 739      "cost_per_1m_out_cached": 0.125,
 740      "context_window": 1048576,
 741      "default_max_tokens": 32768,
 742      "can_reason": true,
 743      "reasoning_levels": [
 744        "low",
 745        "medium",
 746        "high"
 747      ],
 748      "default_reasoning_effort": "medium",
 749      "supports_attachments": true,
 750      "options": {}
 751    },
 752    {
 753      "id": "google/gemini-2.5-pro-preview-05-06",
 754      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 755      "cost_per_1m_in": 1.25,
 756      "cost_per_1m_out": 10,
 757      "cost_per_1m_in_cached": 1.625,
 758      "cost_per_1m_out_cached": 0.125,
 759      "context_window": 1048576,
 760      "default_max_tokens": 32768,
 761      "can_reason": true,
 762      "reasoning_levels": [
 763        "low",
 764        "medium",
 765        "high"
 766      ],
 767      "default_reasoning_effort": "medium",
 768      "supports_attachments": true,
 769      "options": {}
 770    },
 771    {
 772      "id": "google/gemini-2.5-pro-preview",
 773      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 774      "cost_per_1m_in": 1.25,
 775      "cost_per_1m_out": 10,
 776      "cost_per_1m_in_cached": 1.625,
 777      "cost_per_1m_out_cached": 0.125,
 778      "context_window": 1048576,
 779      "default_max_tokens": 32768,
 780      "can_reason": true,
 781      "reasoning_levels": [
 782        "low",
 783        "medium",
 784        "high"
 785      ],
 786      "default_reasoning_effort": "medium",
 787      "supports_attachments": true,
 788      "options": {}
 789    },
 790    {
 791      "id": "google/gemma-3-27b-it",
 792      "name": "Google: Gemma 3 27B",
 793      "cost_per_1m_in": 0.13,
 794      "cost_per_1m_out": 0.52,
 795      "cost_per_1m_in_cached": 0,
 796      "cost_per_1m_out_cached": 0,
 797      "context_window": 96000,
 798      "default_max_tokens": 48000,
 799      "can_reason": false,
 800      "supports_attachments": true,
 801      "options": {}
 802    },
 803    {
 804      "id": "inception/mercury",
 805      "name": "Inception: Mercury",
 806      "cost_per_1m_in": 0.25,
 807      "cost_per_1m_out": 1,
 808      "cost_per_1m_in_cached": 0,
 809      "cost_per_1m_out_cached": 0,
 810      "context_window": 128000,
 811      "default_max_tokens": 8192,
 812      "can_reason": false,
 813      "supports_attachments": false,
 814      "options": {}
 815    },
 816    {
 817      "id": "inception/mercury-coder",
 818      "name": "Inception: Mercury Coder",
 819      "cost_per_1m_in": 0.25,
 820      "cost_per_1m_out": 1,
 821      "cost_per_1m_in_cached": 0,
 822      "cost_per_1m_out_cached": 0,
 823      "context_window": 128000,
 824      "default_max_tokens": 8192,
 825      "can_reason": false,
 826      "supports_attachments": false,
 827      "options": {}
 828    },
 829    {
 830      "id": "meituan/longcat-flash-chat:free",
 831      "name": "Meituan: LongCat Flash Chat (free)",
 832      "cost_per_1m_in": 0,
 833      "cost_per_1m_out": 0,
 834      "cost_per_1m_in_cached": 0,
 835      "cost_per_1m_out_cached": 0,
 836      "context_window": 131072,
 837      "default_max_tokens": 65536,
 838      "can_reason": false,
 839      "supports_attachments": false,
 840      "options": {}
 841    },
 842    {
 843      "id": "meta-llama/llama-3.1-405b-instruct",
 844      "name": "Meta: Llama 3.1 405B Instruct",
 845      "cost_per_1m_in": 3.5,
 846      "cost_per_1m_out": 3.5,
 847      "cost_per_1m_in_cached": 0,
 848      "cost_per_1m_out_cached": 0,
 849      "context_window": 130815,
 850      "default_max_tokens": 13081,
 851      "can_reason": false,
 852      "supports_attachments": false,
 853      "options": {}
 854    },
 855    {
 856      "id": "meta-llama/llama-3.1-70b-instruct",
 857      "name": "Meta: Llama 3.1 70B Instruct",
 858      "cost_per_1m_in": 0.4,
 859      "cost_per_1m_out": 0.4,
 860      "cost_per_1m_in_cached": 0,
 861      "cost_per_1m_out_cached": 0,
 862      "context_window": 131072,
 863      "default_max_tokens": 8192,
 864      "can_reason": false,
 865      "supports_attachments": false,
 866      "options": {}
 867    },
 868    {
 869      "id": "meta-llama/llama-3.1-8b-instruct",
 870      "name": "Meta: Llama 3.1 8B Instruct",
 871      "cost_per_1m_in": 0.02,
 872      "cost_per_1m_out": 0.03,
 873      "cost_per_1m_in_cached": 0,
 874      "cost_per_1m_out_cached": 0,
 875      "context_window": 131072,
 876      "default_max_tokens": 8192,
 877      "can_reason": false,
 878      "supports_attachments": false,
 879      "options": {}
 880    },
 881    {
 882      "id": "meta-llama/llama-3.2-3b-instruct",
 883      "name": "Meta: Llama 3.2 3B Instruct",
 884      "cost_per_1m_in": 0.03,
 885      "cost_per_1m_out": 0.05,
 886      "cost_per_1m_in_cached": 0,
 887      "cost_per_1m_out_cached": 0,
 888      "context_window": 32768,
 889      "default_max_tokens": 16000,
 890      "can_reason": false,
 891      "supports_attachments": false,
 892      "options": {}
 893    },
 894    {
 895      "id": "meta-llama/llama-3.3-70b-instruct",
 896      "name": "Meta: Llama 3.3 70B Instruct",
 897      "cost_per_1m_in": 0.13,
 898      "cost_per_1m_out": 0.38,
 899      "cost_per_1m_in_cached": 0,
 900      "cost_per_1m_out_cached": 0,
 901      "context_window": 131072,
 902      "default_max_tokens": 8192,
 903      "can_reason": false,
 904      "supports_attachments": false,
 905      "options": {}
 906    },
 907    {
 908      "id": "meta-llama/llama-3.3-70b-instruct:free",
 909      "name": "Meta: Llama 3.3 70B Instruct (free)",
 910      "cost_per_1m_in": 0,
 911      "cost_per_1m_out": 0,
 912      "cost_per_1m_in_cached": 0,
 913      "cost_per_1m_out_cached": 0,
 914      "context_window": 131072,
 915      "default_max_tokens": 13107,
 916      "can_reason": false,
 917      "supports_attachments": false,
 918      "options": {}
 919    },
 920    {
 921      "id": "meta-llama/llama-3.3-8b-instruct:free",
 922      "name": "Meta: Llama 3.3 8B Instruct (free)",
 923      "cost_per_1m_in": 0,
 924      "cost_per_1m_out": 0,
 925      "cost_per_1m_in_cached": 0,
 926      "cost_per_1m_out_cached": 0,
 927      "context_window": 128000,
 928      "default_max_tokens": 2014,
 929      "can_reason": false,
 930      "supports_attachments": false,
 931      "options": {}
 932    },
 933    {
 934      "id": "meta-llama/llama-4-maverick",
 935      "name": "Meta: Llama 4 Maverick",
 936      "cost_per_1m_in": 0.27,
 937      "cost_per_1m_out": 0.85,
 938      "cost_per_1m_in_cached": 0,
 939      "cost_per_1m_out_cached": 0,
 940      "context_window": 1048576,
 941      "default_max_tokens": 104857,
 942      "can_reason": false,
 943      "supports_attachments": true,
 944      "options": {}
 945    },
 946    {
 947      "id": "meta-llama/llama-4-maverick:free",
 948      "name": "Meta: Llama 4 Maverick (free)",
 949      "cost_per_1m_in": 0,
 950      "cost_per_1m_out": 0,
 951      "cost_per_1m_in_cached": 0,
 952      "cost_per_1m_out_cached": 0,
 953      "context_window": 128000,
 954      "default_max_tokens": 2014,
 955      "can_reason": false,
 956      "supports_attachments": true,
 957      "options": {}
 958    },
 959    {
 960      "id": "meta-llama/llama-4-scout",
 961      "name": "Meta: Llama 4 Scout",
 962      "cost_per_1m_in": 0.25,
 963      "cost_per_1m_out": 0.7,
 964      "cost_per_1m_in_cached": 0,
 965      "cost_per_1m_out_cached": 0,
 966      "context_window": 1310720,
 967      "default_max_tokens": 4096,
 968      "can_reason": false,
 969      "supports_attachments": true,
 970      "options": {}
 971    },
 972    {
 973      "id": "meta-llama/llama-4-scout:free",
 974      "name": "Meta: Llama 4 Scout (free)",
 975      "cost_per_1m_in": 0,
 976      "cost_per_1m_out": 0,
 977      "cost_per_1m_in_cached": 0,
 978      "cost_per_1m_out_cached": 0,
 979      "context_window": 128000,
 980      "default_max_tokens": 2014,
 981      "can_reason": false,
 982      "supports_attachments": true,
 983      "options": {}
 984    },
 985    {
 986      "id": "microsoft/phi-3-medium-128k-instruct",
 987      "name": "Microsoft: Phi-3 Medium 128K Instruct",
 988      "cost_per_1m_in": 1,
 989      "cost_per_1m_out": 1,
 990      "cost_per_1m_in_cached": 0,
 991      "cost_per_1m_out_cached": 0,
 992      "context_window": 128000,
 993      "default_max_tokens": 12800,
 994      "can_reason": false,
 995      "supports_attachments": false,
 996      "options": {}
 997    },
 998    {
 999      "id": "microsoft/phi-3-mini-128k-instruct",
1000      "name": "Microsoft: Phi-3 Mini 128K Instruct",
1001      "cost_per_1m_in": 0.1,
1002      "cost_per_1m_out": 0.1,
1003      "cost_per_1m_in_cached": 0,
1004      "cost_per_1m_out_cached": 0,
1005      "context_window": 128000,
1006      "default_max_tokens": 12800,
1007      "can_reason": false,
1008      "supports_attachments": false,
1009      "options": {}
1010    },
1011    {
1012      "id": "microsoft/phi-3.5-mini-128k-instruct",
1013      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
1014      "cost_per_1m_in": 0.1,
1015      "cost_per_1m_out": 0.1,
1016      "cost_per_1m_in_cached": 0,
1017      "cost_per_1m_out_cached": 0,
1018      "context_window": 128000,
1019      "default_max_tokens": 12800,
1020      "can_reason": false,
1021      "supports_attachments": false,
1022      "options": {}
1023    },
1024    {
1025      "id": "minimax/minimax-m2",
1026      "name": "MiniMax: MiniMax M2",
1027      "cost_per_1m_in": 0.3,
1028      "cost_per_1m_out": 1.2,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0,
1031      "context_window": 204800,
1032      "default_max_tokens": 2000,
1033      "can_reason": true,
1034      "reasoning_levels": [
1035        "low",
1036        "medium",
1037        "high"
1038      ],
1039      "default_reasoning_effort": "medium",
1040      "supports_attachments": false,
1041      "options": {}
1042    },
1043    {
1044      "id": "minimax/minimax-m2:free",
1045      "name": "MiniMax: MiniMax M2 (free)",
1046      "cost_per_1m_in": 0,
1047      "cost_per_1m_out": 0,
1048      "cost_per_1m_in_cached": 0,
1049      "cost_per_1m_out_cached": 0,
1050      "context_window": 204800,
1051      "default_max_tokens": 65536,
1052      "can_reason": true,
1053      "reasoning_levels": [
1054        "low",
1055        "medium",
1056        "high"
1057      ],
1058      "default_reasoning_effort": "medium",
1059      "supports_attachments": false,
1060      "options": {}
1061    },
1062    {
1063      "id": "mistralai/mistral-large",
1064      "name": "Mistral Large",
1065      "cost_per_1m_in": 2,
1066      "cost_per_1m_out": 6,
1067      "cost_per_1m_in_cached": 0,
1068      "cost_per_1m_out_cached": 0,
1069      "context_window": 128000,
1070      "default_max_tokens": 12800,
1071      "can_reason": false,
1072      "supports_attachments": false,
1073      "options": {}
1074    },
1075    {
1076      "id": "mistralai/mistral-large-2407",
1077      "name": "Mistral Large 2407",
1078      "cost_per_1m_in": 2,
1079      "cost_per_1m_out": 6,
1080      "cost_per_1m_in_cached": 0,
1081      "cost_per_1m_out_cached": 0,
1082      "context_window": 131072,
1083      "default_max_tokens": 13107,
1084      "can_reason": false,
1085      "supports_attachments": false,
1086      "options": {}
1087    },
1088    {
1089      "id": "mistralai/mistral-large-2411",
1090      "name": "Mistral Large 2411",
1091      "cost_per_1m_in": 2,
1092      "cost_per_1m_out": 6,
1093      "cost_per_1m_in_cached": 0,
1094      "cost_per_1m_out_cached": 0,
1095      "context_window": 131072,
1096      "default_max_tokens": 13107,
1097      "can_reason": false,
1098      "supports_attachments": false,
1099      "options": {}
1100    },
1101    {
1102      "id": "mistralai/mistral-small",
1103      "name": "Mistral Small",
1104      "cost_per_1m_in": 0.2,
1105      "cost_per_1m_out": 0.6,
1106      "cost_per_1m_in_cached": 0,
1107      "cost_per_1m_out_cached": 0,
1108      "context_window": 32768,
1109      "default_max_tokens": 3276,
1110      "can_reason": false,
1111      "supports_attachments": false,
1112      "options": {}
1113    },
1114    {
1115      "id": "mistralai/mistral-tiny",
1116      "name": "Mistral Tiny",
1117      "cost_per_1m_in": 0.25,
1118      "cost_per_1m_out": 0.25,
1119      "cost_per_1m_in_cached": 0,
1120      "cost_per_1m_out_cached": 0,
1121      "context_window": 32768,
1122      "default_max_tokens": 3276,
1123      "can_reason": false,
1124      "supports_attachments": false,
1125      "options": {}
1126    },
1127    {
1128      "id": "mistralai/codestral-2501",
1129      "name": "Mistral: Codestral 2501",
1130      "cost_per_1m_in": 0.3,
1131      "cost_per_1m_out": 0.9,
1132      "cost_per_1m_in_cached": 0,
1133      "cost_per_1m_out_cached": 0,
1134      "context_window": 262144,
1135      "default_max_tokens": 26214,
1136      "can_reason": false,
1137      "supports_attachments": false,
1138      "options": {}
1139    },
1140    {
1141      "id": "mistralai/codestral-2508",
1142      "name": "Mistral: Codestral 2508",
1143      "cost_per_1m_in": 0.3,
1144      "cost_per_1m_out": 0.9,
1145      "cost_per_1m_in_cached": 0,
1146      "cost_per_1m_out_cached": 0,
1147      "context_window": 256000,
1148      "default_max_tokens": 25600,
1149      "can_reason": false,
1150      "supports_attachments": false,
1151      "options": {}
1152    },
1153    {
1154      "id": "mistralai/devstral-medium",
1155      "name": "Mistral: Devstral Medium",
1156      "cost_per_1m_in": 0.4,
1157      "cost_per_1m_out": 2,
1158      "cost_per_1m_in_cached": 0,
1159      "cost_per_1m_out_cached": 0,
1160      "context_window": 131072,
1161      "default_max_tokens": 13107,
1162      "can_reason": false,
1163      "supports_attachments": false,
1164      "options": {}
1165    },
1166    {
1167      "id": "mistralai/devstral-small",
1168      "name": "Mistral: Devstral Small 1.1",
1169      "cost_per_1m_in": 0.1,
1170      "cost_per_1m_out": 0.3,
1171      "cost_per_1m_in_cached": 0,
1172      "cost_per_1m_out_cached": 0,
1173      "context_window": 131072,
1174      "default_max_tokens": 13107,
1175      "can_reason": false,
1176      "supports_attachments": false,
1177      "options": {}
1178    },
1179    {
1180      "id": "mistralai/magistral-medium-2506",
1181      "name": "Mistral: Magistral Medium 2506",
1182      "cost_per_1m_in": 2,
1183      "cost_per_1m_out": 5,
1184      "cost_per_1m_in_cached": 0,
1185      "cost_per_1m_out_cached": 0,
1186      "context_window": 40960,
1187      "default_max_tokens": 20000,
1188      "can_reason": true,
1189      "reasoning_levels": [
1190        "low",
1191        "medium",
1192        "high"
1193      ],
1194      "default_reasoning_effort": "medium",
1195      "supports_attachments": false,
1196      "options": {}
1197    },
1198    {
1199      "id": "mistralai/magistral-medium-2506:thinking",
1200      "name": "Mistral: Magistral Medium 2506 (thinking)",
1201      "cost_per_1m_in": 2,
1202      "cost_per_1m_out": 5,
1203      "cost_per_1m_in_cached": 0,
1204      "cost_per_1m_out_cached": 0,
1205      "context_window": 40960,
1206      "default_max_tokens": 20000,
1207      "can_reason": true,
1208      "reasoning_levels": [
1209        "low",
1210        "medium",
1211        "high"
1212      ],
1213      "default_reasoning_effort": "medium",
1214      "supports_attachments": false,
1215      "options": {}
1216    },
1217    {
1218      "id": "mistralai/magistral-small-2506",
1219      "name": "Mistral: Magistral Small 2506",
1220      "cost_per_1m_in": 0.5,
1221      "cost_per_1m_out": 1.5,
1222      "cost_per_1m_in_cached": 0,
1223      "cost_per_1m_out_cached": 0,
1224      "context_window": 40000,
1225      "default_max_tokens": 20000,
1226      "can_reason": true,
1227      "reasoning_levels": [
1228        "low",
1229        "medium",
1230        "high"
1231      ],
1232      "default_reasoning_effort": "medium",
1233      "supports_attachments": false,
1234      "options": {}
1235    },
1236    {
1237      "id": "mistralai/ministral-3b",
1238      "name": "Mistral: Ministral 3B",
1239      "cost_per_1m_in": 0.04,
1240      "cost_per_1m_out": 0.04,
1241      "cost_per_1m_in_cached": 0,
1242      "cost_per_1m_out_cached": 0,
1243      "context_window": 131072,
1244      "default_max_tokens": 13107,
1245      "can_reason": false,
1246      "supports_attachments": false,
1247      "options": {}
1248    },
1249    {
1250      "id": "mistralai/ministral-8b",
1251      "name": "Mistral: Ministral 8B",
1252      "cost_per_1m_in": 0.1,
1253      "cost_per_1m_out": 0.1,
1254      "cost_per_1m_in_cached": 0,
1255      "cost_per_1m_out_cached": 0,
1256      "context_window": 131072,
1257      "default_max_tokens": 13107,
1258      "can_reason": false,
1259      "supports_attachments": false,
1260      "options": {}
1261    },
1262    {
1263      "id": "mistralai/mistral-7b-instruct",
1264      "name": "Mistral: Mistral 7B Instruct",
1265      "cost_per_1m_in": 0.028,
1266      "cost_per_1m_out": 0.054,
1267      "cost_per_1m_in_cached": 0,
1268      "cost_per_1m_out_cached": 0,
1269      "context_window": 32768,
1270      "default_max_tokens": 8192,
1271      "can_reason": false,
1272      "supports_attachments": false,
1273      "options": {}
1274    },
1275    {
1276      "id": "mistralai/mistral-7b-instruct:free",
1277      "name": "Mistral: Mistral 7B Instruct (free)",
1278      "cost_per_1m_in": 0,
1279      "cost_per_1m_out": 0,
1280      "cost_per_1m_in_cached": 0,
1281      "cost_per_1m_out_cached": 0,
1282      "context_window": 32768,
1283      "default_max_tokens": 8192,
1284      "can_reason": false,
1285      "supports_attachments": false,
1286      "options": {}
1287    },
1288    {
1289      "id": "mistralai/mistral-7b-instruct-v0.3",
1290      "name": "Mistral: Mistral 7B Instruct v0.3",
1291      "cost_per_1m_in": 0.028,
1292      "cost_per_1m_out": 0.054,
1293      "cost_per_1m_in_cached": 0,
1294      "cost_per_1m_out_cached": 0,
1295      "context_window": 32768,
1296      "default_max_tokens": 8192,
1297      "can_reason": false,
1298      "supports_attachments": false,
1299      "options": {}
1300    },
1301    {
1302      "id": "mistralai/mistral-medium-3",
1303      "name": "Mistral: Mistral Medium 3",
1304      "cost_per_1m_in": 0.4,
1305      "cost_per_1m_out": 2,
1306      "cost_per_1m_in_cached": 0,
1307      "cost_per_1m_out_cached": 0,
1308      "context_window": 131072,
1309      "default_max_tokens": 13107,
1310      "can_reason": false,
1311      "supports_attachments": true,
1312      "options": {}
1313    },
1314    {
1315      "id": "mistralai/mistral-medium-3.1",
1316      "name": "Mistral: Mistral Medium 3.1",
1317      "cost_per_1m_in": 0.4,
1318      "cost_per_1m_out": 2,
1319      "cost_per_1m_in_cached": 0,
1320      "cost_per_1m_out_cached": 0,
1321      "context_window": 131072,
1322      "default_max_tokens": 13107,
1323      "can_reason": false,
1324      "supports_attachments": true,
1325      "options": {}
1326    },
1327    {
1328      "id": "mistralai/mistral-nemo",
1329      "name": "Mistral: Mistral Nemo",
1330      "cost_per_1m_in": 0.15,
1331      "cost_per_1m_out": 0.15,
1332      "cost_per_1m_in_cached": 0,
1333      "cost_per_1m_out_cached": 0,
1334      "context_window": 131072,
1335      "default_max_tokens": 13107,
1336      "can_reason": false,
1337      "supports_attachments": false,
1338      "options": {}
1339    },
1340    {
1341      "id": "mistralai/mistral-small-24b-instruct-2501",
1342      "name": "Mistral: Mistral Small 3",
1343      "cost_per_1m_in": 0.1,
1344      "cost_per_1m_out": 0.3,
1345      "cost_per_1m_in_cached": 0,
1346      "cost_per_1m_out_cached": 0,
1347      "context_window": 32768,
1348      "default_max_tokens": 3276,
1349      "can_reason": false,
1350      "supports_attachments": false,
1351      "options": {}
1352    },
1353    {
1354      "id": "mistralai/mistral-small-3.1-24b-instruct",
1355      "name": "Mistral: Mistral Small 3.1 24B",
1356      "cost_per_1m_in": 0.1,
1357      "cost_per_1m_out": 0.3,
1358      "cost_per_1m_in_cached": 0,
1359      "cost_per_1m_out_cached": 0,
1360      "context_window": 131072,
1361      "default_max_tokens": 13107,
1362      "can_reason": false,
1363      "supports_attachments": true,
1364      "options": {}
1365    },
1366    {
1367      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
1368      "name": "Mistral: Mistral Small 3.1 24B (free)",
1369      "cost_per_1m_in": 0,
1370      "cost_per_1m_out": 0,
1371      "cost_per_1m_in_cached": 0,
1372      "cost_per_1m_out_cached": 0,
1373      "context_window": 96000,
1374      "default_max_tokens": 48000,
1375      "can_reason": false,
1376      "supports_attachments": true,
1377      "options": {}
1378    },
1379    {
1380      "id": "mistralai/mistral-small-3.2-24b-instruct",
1381      "name": "Mistral: Mistral Small 3.2 24B",
1382      "cost_per_1m_in": 0.1,
1383      "cost_per_1m_out": 0.3,
1384      "cost_per_1m_in_cached": 0,
1385      "cost_per_1m_out_cached": 0,
1386      "context_window": 131072,
1387      "default_max_tokens": 13107,
1388      "can_reason": false,
1389      "supports_attachments": true,
1390      "options": {}
1391    },
1392    {
1393      "id": "mistralai/mistral-small-3.2-24b-instruct:free",
1394      "name": "Mistral: Mistral Small 3.2 24B (free)",
1395      "cost_per_1m_in": 0,
1396      "cost_per_1m_out": 0,
1397      "cost_per_1m_in_cached": 0,
1398      "cost_per_1m_out_cached": 0,
1399      "context_window": 131072,
1400      "default_max_tokens": 13107,
1401      "can_reason": false,
1402      "supports_attachments": true,
1403      "options": {}
1404    },
1405    {
1406      "id": "mistralai/mixtral-8x22b-instruct",
1407      "name": "Mistral: Mixtral 8x22B Instruct",
1408      "cost_per_1m_in": 2,
1409      "cost_per_1m_out": 6,
1410      "cost_per_1m_in_cached": 0,
1411      "cost_per_1m_out_cached": 0,
1412      "context_window": 65536,
1413      "default_max_tokens": 6553,
1414      "can_reason": false,
1415      "supports_attachments": false,
1416      "options": {}
1417    },
1418    {
1419      "id": "mistralai/mixtral-8x7b-instruct",
1420      "name": "Mistral: Mixtral 8x7B Instruct",
1421      "cost_per_1m_in": 0.54,
1422      "cost_per_1m_out": 0.54,
1423      "cost_per_1m_in_cached": 0,
1424      "cost_per_1m_out_cached": 0,
1425      "context_window": 32768,
1426      "default_max_tokens": 8192,
1427      "can_reason": false,
1428      "supports_attachments": false,
1429      "options": {}
1430    },
1431    {
1432      "id": "mistralai/pixtral-large-2411",
1433      "name": "Mistral: Pixtral Large 2411",
1434      "cost_per_1m_in": 2,
1435      "cost_per_1m_out": 6,
1436      "cost_per_1m_in_cached": 0,
1437      "cost_per_1m_out_cached": 0,
1438      "context_window": 131072,
1439      "default_max_tokens": 13107,
1440      "can_reason": false,
1441      "supports_attachments": true,
1442      "options": {}
1443    },
1444    {
1445      "id": "mistralai/mistral-saba",
1446      "name": "Mistral: Saba",
1447      "cost_per_1m_in": 0.2,
1448      "cost_per_1m_out": 0.6,
1449      "cost_per_1m_in_cached": 0,
1450      "cost_per_1m_out_cached": 0,
1451      "context_window": 32768,
1452      "default_max_tokens": 3276,
1453      "can_reason": false,
1454      "supports_attachments": false,
1455      "options": {}
1456    },
1457    {
1458      "id": "mistralai/voxtral-small-24b-2507",
1459      "name": "Mistral: Voxtral Small 24B 2507",
1460      "cost_per_1m_in": 0.1,
1461      "cost_per_1m_out": 0.3,
1462      "cost_per_1m_in_cached": 0,
1463      "cost_per_1m_out_cached": 0,
1464      "context_window": 32000,
1465      "default_max_tokens": 3200,
1466      "can_reason": false,
1467      "supports_attachments": false,
1468      "options": {}
1469    },
1470    {
1471      "id": "moonshotai/kimi-k2",
1472      "name": "MoonshotAI: Kimi K2 0711",
1473      "cost_per_1m_in": 0.6,
1474      "cost_per_1m_out": 2.5,
1475      "cost_per_1m_in_cached": 0,
1476      "cost_per_1m_out_cached": 0.15,
1477      "context_window": 131072,
1478      "default_max_tokens": 13107,
1479      "can_reason": false,
1480      "supports_attachments": false,
1481      "options": {}
1482    },
1483    {
1484      "id": "moonshotai/kimi-k2-0905",
1485      "name": "MoonshotAI: Kimi K2 0905",
1486      "cost_per_1m_in": 1,
1487      "cost_per_1m_out": 3,
1488      "cost_per_1m_in_cached": 0,
1489      "cost_per_1m_out_cached": 0.5,
1490      "context_window": 262144,
1491      "default_max_tokens": 8192,
1492      "can_reason": false,
1493      "supports_attachments": false,
1494      "options": {}
1495    },
1496    {
1497      "id": "moonshotai/kimi-k2-0905:exacto",
1498      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
1499      "cost_per_1m_in": 0.6,
1500      "cost_per_1m_out": 2.5,
1501      "cost_per_1m_in_cached": 0,
1502      "cost_per_1m_out_cached": 0,
1503      "context_window": 262144,
1504      "default_max_tokens": 26214,
1505      "can_reason": false,
1506      "supports_attachments": false,
1507      "options": {}
1508    },
1509    {
1510      "id": "moonshotai/kimi-k2-thinking",
1511      "name": "MoonshotAI: Kimi K2 Thinking",
1512      "cost_per_1m_in": 1.15,
1513      "cost_per_1m_out": 8,
1514      "cost_per_1m_in_cached": 0,
1515      "cost_per_1m_out_cached": 0.15,
1516      "context_window": 262144,
1517      "default_max_tokens": 131072,
1518      "can_reason": true,
1519      "reasoning_levels": [
1520        "low",
1521        "medium",
1522        "high"
1523      ],
1524      "default_reasoning_effort": "medium",
1525      "supports_attachments": false,
1526      "options": {}
1527    },
1528    {
1529      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1530      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1531      "cost_per_1m_in": 0.6,
1532      "cost_per_1m_out": 0.6,
1533      "cost_per_1m_in_cached": 0,
1534      "cost_per_1m_out_cached": 0,
1535      "context_window": 131072,
1536      "default_max_tokens": 8192,
1537      "can_reason": false,
1538      "supports_attachments": false,
1539      "options": {}
1540    },
1541    {
1542      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1543      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1544      "cost_per_1m_in": 0.1,
1545      "cost_per_1m_out": 0.4,
1546      "cost_per_1m_in_cached": 0,
1547      "cost_per_1m_out_cached": 0,
1548      "context_window": 131072,
1549      "default_max_tokens": 13107,
1550      "can_reason": true,
1551      "reasoning_levels": [
1552        "low",
1553        "medium",
1554        "high"
1555      ],
1556      "default_reasoning_effort": "medium",
1557      "supports_attachments": false,
1558      "options": {}
1559    },
1560    {
1561      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1562      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1563      "cost_per_1m_in": 0,
1564      "cost_per_1m_out": 0,
1565      "cost_per_1m_in_cached": 0,
1566      "cost_per_1m_out_cached": 0,
1567      "context_window": 128000,
1568      "default_max_tokens": 64000,
1569      "can_reason": true,
1570      "reasoning_levels": [
1571        "low",
1572        "medium",
1573        "high"
1574      ],
1575      "default_reasoning_effort": "medium",
1576      "supports_attachments": true,
1577      "options": {}
1578    },
1579    {
1580      "id": "nvidia/nemotron-nano-9b-v2",
1581      "name": "NVIDIA: Nemotron Nano 9B V2",
1582      "cost_per_1m_in": 0.04,
1583      "cost_per_1m_out": 0.16,
1584      "cost_per_1m_in_cached": 0,
1585      "cost_per_1m_out_cached": 0,
1586      "context_window": 131072,
1587      "default_max_tokens": 13107,
1588      "can_reason": true,
1589      "reasoning_levels": [
1590        "low",
1591        "medium",
1592        "high"
1593      ],
1594      "default_reasoning_effort": "medium",
1595      "supports_attachments": false,
1596      "options": {}
1597    },
1598    {
1599      "id": "nvidia/nemotron-nano-9b-v2:free",
1600      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1601      "cost_per_1m_in": 0,
1602      "cost_per_1m_out": 0,
1603      "cost_per_1m_in_cached": 0,
1604      "cost_per_1m_out_cached": 0,
1605      "context_window": 128000,
1606      "default_max_tokens": 12800,
1607      "can_reason": true,
1608      "reasoning_levels": [
1609        "low",
1610        "medium",
1611        "high"
1612      ],
1613      "default_reasoning_effort": "medium",
1614      "supports_attachments": false,
1615      "options": {}
1616    },
1617    {
1618      "id": "nousresearch/deephermes-3-mistral-24b-preview",
1619      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
1620      "cost_per_1m_in": 0.15,
1621      "cost_per_1m_out": 0.59,
1622      "cost_per_1m_in_cached": 0,
1623      "cost_per_1m_out_cached": 0,
1624      "context_window": 32768,
1625      "default_max_tokens": 16384,
1626      "can_reason": true,
1627      "reasoning_levels": [
1628        "low",
1629        "medium",
1630        "high"
1631      ],
1632      "default_reasoning_effort": "medium",
1633      "supports_attachments": false,
1634      "options": {}
1635    },
1636    {
1637      "id": "nousresearch/hermes-3-llama-3.1-70b",
1638      "name": "Nous: Hermes 3 70B Instruct",
1639      "cost_per_1m_in": 0.4,
1640      "cost_per_1m_out": 0.4,
1641      "cost_per_1m_in_cached": 0,
1642      "cost_per_1m_out_cached": 0,
1643      "context_window": 12288,
1644      "default_max_tokens": 1228,
1645      "can_reason": false,
1646      "supports_attachments": false,
1647      "options": {}
1648    },
1649    {
1650      "id": "nousresearch/hermes-4-405b",
1651      "name": "Nous: Hermes 4 405B",
1652      "cost_per_1m_in": 0.3,
1653      "cost_per_1m_out": 1.2,
1654      "cost_per_1m_in_cached": 0,
1655      "cost_per_1m_out_cached": 0,
1656      "context_window": 131072,
1657      "default_max_tokens": 65536,
1658      "can_reason": true,
1659      "reasoning_levels": [
1660        "low",
1661        "medium",
1662        "high"
1663      ],
1664      "default_reasoning_effort": "medium",
1665      "supports_attachments": false,
1666      "options": {}
1667    },
1668    {
1669      "id": "openai/codex-mini",
1670      "name": "OpenAI: Codex Mini",
1671      "cost_per_1m_in": 1.5,
1672      "cost_per_1m_out": 6,
1673      "cost_per_1m_in_cached": 0,
1674      "cost_per_1m_out_cached": 0.375,
1675      "context_window": 200000,
1676      "default_max_tokens": 50000,
1677      "can_reason": true,
1678      "reasoning_levels": [
1679        "low",
1680        "medium",
1681        "high"
1682      ],
1683      "default_reasoning_effort": "medium",
1684      "supports_attachments": true,
1685      "options": {}
1686    },
1687    {
1688      "id": "openai/gpt-4-turbo",
1689      "name": "OpenAI: GPT-4 Turbo",
1690      "cost_per_1m_in": 10,
1691      "cost_per_1m_out": 30,
1692      "cost_per_1m_in_cached": 0,
1693      "cost_per_1m_out_cached": 0,
1694      "context_window": 128000,
1695      "default_max_tokens": 2048,
1696      "can_reason": false,
1697      "supports_attachments": true,
1698      "options": {}
1699    },
1700    {
1701      "id": "openai/gpt-4-1106-preview",
1702      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1703      "cost_per_1m_in": 10,
1704      "cost_per_1m_out": 30,
1705      "cost_per_1m_in_cached": 0,
1706      "cost_per_1m_out_cached": 0,
1707      "context_window": 128000,
1708      "default_max_tokens": 2048,
1709      "can_reason": false,
1710      "supports_attachments": false,
1711      "options": {}
1712    },
1713    {
1714      "id": "openai/gpt-4-turbo-preview",
1715      "name": "OpenAI: GPT-4 Turbo Preview",
1716      "cost_per_1m_in": 10,
1717      "cost_per_1m_out": 30,
1718      "cost_per_1m_in_cached": 0,
1719      "cost_per_1m_out_cached": 0,
1720      "context_window": 128000,
1721      "default_max_tokens": 2048,
1722      "can_reason": false,
1723      "supports_attachments": false,
1724      "options": {}
1725    },
1726    {
1727      "id": "openai/gpt-4.1",
1728      "name": "OpenAI: GPT-4.1",
1729      "cost_per_1m_in": 2,
1730      "cost_per_1m_out": 8,
1731      "cost_per_1m_in_cached": 0,
1732      "cost_per_1m_out_cached": 0.5,
1733      "context_window": 1047576,
1734      "default_max_tokens": 104757,
1735      "can_reason": false,
1736      "supports_attachments": true,
1737      "options": {}
1738    },
1739    {
1740      "id": "openai/gpt-4.1-mini",
1741      "name": "OpenAI: GPT-4.1 Mini",
1742      "cost_per_1m_in": 0.4,
1743      "cost_per_1m_out": 1.6,
1744      "cost_per_1m_in_cached": 0,
1745      "cost_per_1m_out_cached": 0.1,
1746      "context_window": 1047576,
1747      "default_max_tokens": 104757,
1748      "can_reason": false,
1749      "supports_attachments": true,
1750      "options": {}
1751    },
1752    {
1753      "id": "openai/gpt-4.1-nano",
1754      "name": "OpenAI: GPT-4.1 Nano",
1755      "cost_per_1m_in": 0.1,
1756      "cost_per_1m_out": 0.4,
1757      "cost_per_1m_in_cached": 0,
1758      "cost_per_1m_out_cached": 0.03,
1759      "context_window": 1047576,
1760      "default_max_tokens": 104757,
1761      "can_reason": false,
1762      "supports_attachments": true,
1763      "options": {}
1764    },
1765    {
1766      "id": "openai/gpt-4o",
1767      "name": "OpenAI: GPT-4o",
1768      "cost_per_1m_in": 2.5,
1769      "cost_per_1m_out": 10,
1770      "cost_per_1m_in_cached": 0,
1771      "cost_per_1m_out_cached": 0,
1772      "context_window": 128000,
1773      "default_max_tokens": 8192,
1774      "can_reason": false,
1775      "supports_attachments": true,
1776      "options": {}
1777    },
1778    {
1779      "id": "openai/gpt-4o-2024-05-13",
1780      "name": "OpenAI: GPT-4o (2024-05-13)",
1781      "cost_per_1m_in": 5,
1782      "cost_per_1m_out": 15,
1783      "cost_per_1m_in_cached": 0,
1784      "cost_per_1m_out_cached": 0,
1785      "context_window": 128000,
1786      "default_max_tokens": 2048,
1787      "can_reason": false,
1788      "supports_attachments": true,
1789      "options": {}
1790    },
1791    {
1792      "id": "openai/gpt-4o-2024-08-06",
1793      "name": "OpenAI: GPT-4o (2024-08-06)",
1794      "cost_per_1m_in": 2.5,
1795      "cost_per_1m_out": 10,
1796      "cost_per_1m_in_cached": 0,
1797      "cost_per_1m_out_cached": 1.25,
1798      "context_window": 128000,
1799      "default_max_tokens": 8192,
1800      "can_reason": false,
1801      "supports_attachments": true,
1802      "options": {}
1803    },
1804    {
1805      "id": "openai/gpt-4o-2024-11-20",
1806      "name": "OpenAI: GPT-4o (2024-11-20)",
1807      "cost_per_1m_in": 2.5,
1808      "cost_per_1m_out": 10,
1809      "cost_per_1m_in_cached": 0,
1810      "cost_per_1m_out_cached": 1.25,
1811      "context_window": 128000,
1812      "default_max_tokens": 8192,
1813      "can_reason": false,
1814      "supports_attachments": true,
1815      "options": {}
1816    },
1817    {
1818      "id": "openai/gpt-4o:extended",
1819      "name": "OpenAI: GPT-4o (extended)",
1820      "cost_per_1m_in": 6,
1821      "cost_per_1m_out": 18,
1822      "cost_per_1m_in_cached": 0,
1823      "cost_per_1m_out_cached": 0,
1824      "context_window": 128000,
1825      "default_max_tokens": 32000,
1826      "can_reason": false,
1827      "supports_attachments": true,
1828      "options": {}
1829    },
1830    {
1831      "id": "openai/gpt-4o-audio-preview",
1832      "name": "OpenAI: GPT-4o Audio",
1833      "cost_per_1m_in": 2.5,
1834      "cost_per_1m_out": 10,
1835      "cost_per_1m_in_cached": 0,
1836      "cost_per_1m_out_cached": 0,
1837      "context_window": 128000,
1838      "default_max_tokens": 8192,
1839      "can_reason": false,
1840      "supports_attachments": false,
1841      "options": {}
1842    },
1843    {
1844      "id": "openai/gpt-4o-mini",
1845      "name": "OpenAI: GPT-4o-mini",
1846      "cost_per_1m_in": 0.15,
1847      "cost_per_1m_out": 0.6,
1848      "cost_per_1m_in_cached": 0,
1849      "cost_per_1m_out_cached": 0.075,
1850      "context_window": 128000,
1851      "default_max_tokens": 8192,
1852      "can_reason": false,
1853      "supports_attachments": true,
1854      "options": {}
1855    },
1856    {
1857      "id": "openai/gpt-4o-mini-2024-07-18",
1858      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1859      "cost_per_1m_in": 0.15,
1860      "cost_per_1m_out": 0.6,
1861      "cost_per_1m_in_cached": 0,
1862      "cost_per_1m_out_cached": 0.075,
1863      "context_window": 128000,
1864      "default_max_tokens": 8192,
1865      "can_reason": false,
1866      "supports_attachments": true,
1867      "options": {}
1868    },
1869    {
1870      "id": "openai/gpt-5",
1871      "name": "OpenAI: GPT-5",
1872      "cost_per_1m_in": 1.25,
1873      "cost_per_1m_out": 10,
1874      "cost_per_1m_in_cached": 0,
1875      "cost_per_1m_out_cached": 0.125,
1876      "context_window": 400000,
1877      "default_max_tokens": 64000,
1878      "can_reason": true,
1879      "reasoning_levels": [
1880        "low",
1881        "medium",
1882        "high"
1883      ],
1884      "default_reasoning_effort": "medium",
1885      "supports_attachments": true,
1886      "options": {}
1887    },
1888    {
1889      "id": "openai/gpt-5-codex",
1890      "name": "OpenAI: GPT-5 Codex",
1891      "cost_per_1m_in": 1.25,
1892      "cost_per_1m_out": 10,
1893      "cost_per_1m_in_cached": 0,
1894      "cost_per_1m_out_cached": 0.125,
1895      "context_window": 400000,
1896      "default_max_tokens": 64000,
1897      "can_reason": true,
1898      "reasoning_levels": [
1899        "low",
1900        "medium",
1901        "high"
1902      ],
1903      "default_reasoning_effort": "medium",
1904      "supports_attachments": true,
1905      "options": {}
1906    },
1907    {
1908      "id": "openai/gpt-5-image",
1909      "name": "OpenAI: GPT-5 Image",
1910      "cost_per_1m_in": 10,
1911      "cost_per_1m_out": 10,
1912      "cost_per_1m_in_cached": 0,
1913      "cost_per_1m_out_cached": 1.25,
1914      "context_window": 400000,
1915      "default_max_tokens": 64000,
1916      "can_reason": true,
1917      "reasoning_levels": [
1918        "low",
1919        "medium",
1920        "high"
1921      ],
1922      "default_reasoning_effort": "medium",
1923      "supports_attachments": true,
1924      "options": {}
1925    },
1926    {
1927      "id": "openai/gpt-5-image-mini",
1928      "name": "OpenAI: GPT-5 Image Mini",
1929      "cost_per_1m_in": 2.5,
1930      "cost_per_1m_out": 2,
1931      "cost_per_1m_in_cached": 0,
1932      "cost_per_1m_out_cached": 0.25,
1933      "context_window": 400000,
1934      "default_max_tokens": 64000,
1935      "can_reason": true,
1936      "reasoning_levels": [
1937        "low",
1938        "medium",
1939        "high"
1940      ],
1941      "default_reasoning_effort": "medium",
1942      "supports_attachments": true,
1943      "options": {}
1944    },
1945    {
1946      "id": "openai/gpt-5-mini",
1947      "name": "OpenAI: GPT-5 Mini",
1948      "cost_per_1m_in": 0.25,
1949      "cost_per_1m_out": 2,
1950      "cost_per_1m_in_cached": 0,
1951      "cost_per_1m_out_cached": 0.03,
1952      "context_window": 400000,
1953      "default_max_tokens": 40000,
1954      "can_reason": true,
1955      "reasoning_levels": [
1956        "low",
1957        "medium",
1958        "high"
1959      ],
1960      "default_reasoning_effort": "medium",
1961      "supports_attachments": true,
1962      "options": {}
1963    },
1964    {
1965      "id": "openai/gpt-5-nano",
1966      "name": "OpenAI: GPT-5 Nano",
1967      "cost_per_1m_in": 0.05,
1968      "cost_per_1m_out": 0.4,
1969      "cost_per_1m_in_cached": 0,
1970      "cost_per_1m_out_cached": 0.005,
1971      "context_window": 400000,
1972      "default_max_tokens": 64000,
1973      "can_reason": true,
1974      "reasoning_levels": [
1975        "low",
1976        "medium",
1977        "high"
1978      ],
1979      "default_reasoning_effort": "medium",
1980      "supports_attachments": true,
1981      "options": {}
1982    },
1983    {
1984      "id": "openai/gpt-5-pro",
1985      "name": "OpenAI: GPT-5 Pro",
1986      "cost_per_1m_in": 15,
1987      "cost_per_1m_out": 120,
1988      "cost_per_1m_in_cached": 0,
1989      "cost_per_1m_out_cached": 0,
1990      "context_window": 400000,
1991      "default_max_tokens": 64000,
1992      "can_reason": true,
1993      "reasoning_levels": [
1994        "low",
1995        "medium",
1996        "high"
1997      ],
1998      "default_reasoning_effort": "medium",
1999      "supports_attachments": true,
2000      "options": {}
2001    },
2002    {
2003      "id": "openai/gpt-oss-120b",
2004      "name": "OpenAI: gpt-oss-120b",
2005      "cost_per_1m_in": 0.05,
2006      "cost_per_1m_out": 0.25,
2007      "cost_per_1m_in_cached": 0,
2008      "cost_per_1m_out_cached": 0,
2009      "context_window": 131072,
2010      "default_max_tokens": 16384,
2011      "can_reason": true,
2012      "reasoning_levels": [
2013        "low",
2014        "medium",
2015        "high"
2016      ],
2017      "default_reasoning_effort": "medium",
2018      "supports_attachments": false,
2019      "options": {}
2020    },
2021    {
2022      "id": "openai/gpt-oss-120b:exacto",
2023      "name": "OpenAI: gpt-oss-120b (exacto)",
2024      "cost_per_1m_in": 0.05,
2025      "cost_per_1m_out": 0.24,
2026      "cost_per_1m_in_cached": 0,
2027      "cost_per_1m_out_cached": 0,
2028      "context_window": 131072,
2029      "default_max_tokens": 13107,
2030      "can_reason": true,
2031      "reasoning_levels": [
2032        "low",
2033        "medium",
2034        "high"
2035      ],
2036      "default_reasoning_effort": "medium",
2037      "supports_attachments": false,
2038      "options": {}
2039    },
2040    {
2041      "id": "openai/gpt-oss-20b",
2042      "name": "OpenAI: gpt-oss-20b",
2043      "cost_per_1m_in": 0.04,
2044      "cost_per_1m_out": 0.15,
2045      "cost_per_1m_in_cached": 0,
2046      "cost_per_1m_out_cached": 0,
2047      "context_window": 131072,
2048      "default_max_tokens": 13107,
2049      "can_reason": true,
2050      "reasoning_levels": [
2051        "low",
2052        "medium",
2053        "high"
2054      ],
2055      "default_reasoning_effort": "medium",
2056      "supports_attachments": false,
2057      "options": {}
2058    },
2059    {
2060      "id": "openai/gpt-oss-20b:free",
2061      "name": "OpenAI: gpt-oss-20b (free)",
2062      "cost_per_1m_in": 0,
2063      "cost_per_1m_out": 0,
2064      "cost_per_1m_in_cached": 0,
2065      "cost_per_1m_out_cached": 0,
2066      "context_window": 131072,
2067      "default_max_tokens": 65536,
2068      "can_reason": true,
2069      "reasoning_levels": [
2070        "low",
2071        "medium",
2072        "high"
2073      ],
2074      "default_reasoning_effort": "medium",
2075      "supports_attachments": false,
2076      "options": {}
2077    },
2078    {
2079      "id": "openai/gpt-oss-safeguard-20b",
2080      "name": "OpenAI: gpt-oss-safeguard-20b",
2081      "cost_per_1m_in": 0.075,
2082      "cost_per_1m_out": 0.3,
2083      "cost_per_1m_in_cached": 0,
2084      "cost_per_1m_out_cached": 0.037,
2085      "context_window": 131072,
2086      "default_max_tokens": 32768,
2087      "can_reason": true,
2088      "reasoning_levels": [
2089        "low",
2090        "medium",
2091        "high"
2092      ],
2093      "default_reasoning_effort": "medium",
2094      "supports_attachments": false,
2095      "options": {}
2096    },
2097    {
2098      "id": "openai/o1",
2099      "name": "OpenAI: o1",
2100      "cost_per_1m_in": 15,
2101      "cost_per_1m_out": 60,
2102      "cost_per_1m_in_cached": 0,
2103      "cost_per_1m_out_cached": 7.5,
2104      "context_window": 200000,
2105      "default_max_tokens": 50000,
2106      "can_reason": false,
2107      "supports_attachments": true,
2108      "options": {}
2109    },
2110    {
2111      "id": "openai/o3",
2112      "name": "OpenAI: o3",
2113      "cost_per_1m_in": 2,
2114      "cost_per_1m_out": 8,
2115      "cost_per_1m_in_cached": 0,
2116      "cost_per_1m_out_cached": 0.5,
2117      "context_window": 200000,
2118      "default_max_tokens": 50000,
2119      "can_reason": true,
2120      "reasoning_levels": [
2121        "low",
2122        "medium",
2123        "high"
2124      ],
2125      "default_reasoning_effort": "medium",
2126      "supports_attachments": true,
2127      "options": {}
2128    },
2129    {
2130      "id": "openai/o3-deep-research",
2131      "name": "OpenAI: o3 Deep Research",
2132      "cost_per_1m_in": 10,
2133      "cost_per_1m_out": 40,
2134      "cost_per_1m_in_cached": 0,
2135      "cost_per_1m_out_cached": 2.5,
2136      "context_window": 200000,
2137      "default_max_tokens": 50000,
2138      "can_reason": true,
2139      "reasoning_levels": [
2140        "low",
2141        "medium",
2142        "high"
2143      ],
2144      "default_reasoning_effort": "medium",
2145      "supports_attachments": true,
2146      "options": {}
2147    },
2148    {
2149      "id": "openai/o3-mini",
2150      "name": "OpenAI: o3 Mini",
2151      "cost_per_1m_in": 1.1,
2152      "cost_per_1m_out": 4.4,
2153      "cost_per_1m_in_cached": 0,
2154      "cost_per_1m_out_cached": 0.55,
2155      "context_window": 200000,
2156      "default_max_tokens": 50000,
2157      "can_reason": false,
2158      "supports_attachments": false,
2159      "options": {}
2160    },
2161    {
2162      "id": "openai/o3-mini-high",
2163      "name": "OpenAI: o3 Mini High",
2164      "cost_per_1m_in": 1.1,
2165      "cost_per_1m_out": 4.4,
2166      "cost_per_1m_in_cached": 0,
2167      "cost_per_1m_out_cached": 0.55,
2168      "context_window": 200000,
2169      "default_max_tokens": 50000,
2170      "can_reason": false,
2171      "supports_attachments": false,
2172      "options": {}
2173    },
2174    {
2175      "id": "openai/o3-pro",
2176      "name": "OpenAI: o3 Pro",
2177      "cost_per_1m_in": 20,
2178      "cost_per_1m_out": 80,
2179      "cost_per_1m_in_cached": 0,
2180      "cost_per_1m_out_cached": 0,
2181      "context_window": 200000,
2182      "default_max_tokens": 50000,
2183      "can_reason": true,
2184      "reasoning_levels": [
2185        "low",
2186        "medium",
2187        "high"
2188      ],
2189      "default_reasoning_effort": "medium",
2190      "supports_attachments": true,
2191      "options": {}
2192    },
2193    {
2194      "id": "openai/o4-mini",
2195      "name": "OpenAI: o4 Mini",
2196      "cost_per_1m_in": 1.1,
2197      "cost_per_1m_out": 4.4,
2198      "cost_per_1m_in_cached": 0,
2199      "cost_per_1m_out_cached": 0.275,
2200      "context_window": 200000,
2201      "default_max_tokens": 50000,
2202      "can_reason": true,
2203      "reasoning_levels": [
2204        "low",
2205        "medium",
2206        "high"
2207      ],
2208      "default_reasoning_effort": "medium",
2209      "supports_attachments": true,
2210      "options": {}
2211    },
2212    {
2213      "id": "openai/o4-mini-deep-research",
2214      "name": "OpenAI: o4 Mini Deep Research",
2215      "cost_per_1m_in": 2,
2216      "cost_per_1m_out": 8,
2217      "cost_per_1m_in_cached": 0,
2218      "cost_per_1m_out_cached": 0.5,
2219      "context_window": 200000,
2220      "default_max_tokens": 50000,
2221      "can_reason": true,
2222      "reasoning_levels": [
2223        "low",
2224        "medium",
2225        "high"
2226      ],
2227      "default_reasoning_effort": "medium",
2228      "supports_attachments": true,
2229      "options": {}
2230    },
2231    {
2232      "id": "openai/o4-mini-high",
2233      "name": "OpenAI: o4 Mini High",
2234      "cost_per_1m_in": 1.1,
2235      "cost_per_1m_out": 4.4,
2236      "cost_per_1m_in_cached": 0,
2237      "cost_per_1m_out_cached": 0.275,
2238      "context_window": 200000,
2239      "default_max_tokens": 50000,
2240      "can_reason": true,
2241      "reasoning_levels": [
2242        "low",
2243        "medium",
2244        "high"
2245      ],
2246      "default_reasoning_effort": "medium",
2247      "supports_attachments": true,
2248      "options": {}
2249    },
2250    {
2251      "id": "openrouter/polaris-alpha",
2252      "name": "Polaris Alpha",
2253      "cost_per_1m_in": 0,
2254      "cost_per_1m_out": 0,
2255      "cost_per_1m_in_cached": 0,
2256      "cost_per_1m_out_cached": 0,
2257      "context_window": 256000,
2258      "default_max_tokens": 64000,
2259      "can_reason": false,
2260      "supports_attachments": true,
2261      "options": {}
2262    },
2263    {
2264      "id": "qwen/qwen-2.5-72b-instruct",
2265      "name": "Qwen2.5 72B Instruct",
2266      "cost_per_1m_in": 0.07,
2267      "cost_per_1m_out": 0.26,
2268      "cost_per_1m_in_cached": 0,
2269      "cost_per_1m_out_cached": 0,
2270      "context_window": 32768,
2271      "default_max_tokens": 16384,
2272      "can_reason": false,
2273      "supports_attachments": false,
2274      "options": {}
2275    },
2276    {
2277      "id": "qwen/qwq-32b",
2278      "name": "Qwen: QwQ 32B",
2279      "cost_per_1m_in": 0.15,
2280      "cost_per_1m_out": 0.58,
2281      "cost_per_1m_in_cached": 0,
2282      "cost_per_1m_out_cached": 0,
2283      "context_window": 131072,
2284      "default_max_tokens": 65536,
2285      "can_reason": true,
2286      "reasoning_levels": [
2287        "low",
2288        "medium",
2289        "high"
2290      ],
2291      "default_reasoning_effort": "medium",
2292      "supports_attachments": false,
2293      "options": {}
2294    },
2295    {
2296      "id": "qwen/qwen-plus-2025-07-28",
2297      "name": "Qwen: Qwen Plus 0728",
2298      "cost_per_1m_in": 0.4,
2299      "cost_per_1m_out": 1.2,
2300      "cost_per_1m_in_cached": 0,
2301      "cost_per_1m_out_cached": 0,
2302      "context_window": 1000000,
2303      "default_max_tokens": 16384,
2304      "can_reason": false,
2305      "supports_attachments": false,
2306      "options": {}
2307    },
2308    {
2309      "id": "qwen/qwen-plus-2025-07-28:thinking",
2310      "name": "Qwen: Qwen Plus 0728 (thinking)",
2311      "cost_per_1m_in": 0.4,
2312      "cost_per_1m_out": 4,
2313      "cost_per_1m_in_cached": 0,
2314      "cost_per_1m_out_cached": 0,
2315      "context_window": 1000000,
2316      "default_max_tokens": 16384,
2317      "can_reason": true,
2318      "reasoning_levels": [
2319        "low",
2320        "medium",
2321        "high"
2322      ],
2323      "default_reasoning_effort": "medium",
2324      "supports_attachments": false,
2325      "options": {}
2326    },
2327    {
2328      "id": "qwen/qwen-vl-max",
2329      "name": "Qwen: Qwen VL Max",
2330      "cost_per_1m_in": 0.8,
2331      "cost_per_1m_out": 3.2,
2332      "cost_per_1m_in_cached": 0,
2333      "cost_per_1m_out_cached": 0,
2334      "context_window": 131072,
2335      "default_max_tokens": 4096,
2336      "can_reason": false,
2337      "supports_attachments": true,
2338      "options": {}
2339    },
2340    {
2341      "id": "qwen/qwen-max",
2342      "name": "Qwen: Qwen-Max",
2343      "cost_per_1m_in": 1.6,
2344      "cost_per_1m_out": 6.4,
2345      "cost_per_1m_in_cached": 0,
2346      "cost_per_1m_out_cached": 0.64,
2347      "context_window": 32768,
2348      "default_max_tokens": 4096,
2349      "can_reason": false,
2350      "supports_attachments": false,
2351      "options": {}
2352    },
2353    {
2354      "id": "qwen/qwen-plus",
2355      "name": "Qwen: Qwen-Plus",
2356      "cost_per_1m_in": 0.4,
2357      "cost_per_1m_out": 1.2,
2358      "cost_per_1m_in_cached": 0,
2359      "cost_per_1m_out_cached": 0.16,
2360      "context_window": 131072,
2361      "default_max_tokens": 4096,
2362      "can_reason": false,
2363      "supports_attachments": false,
2364      "options": {}
2365    },
2366    {
2367      "id": "qwen/qwen-turbo",
2368      "name": "Qwen: Qwen-Turbo",
2369      "cost_per_1m_in": 0.05,
2370      "cost_per_1m_out": 0.2,
2371      "cost_per_1m_in_cached": 0,
2372      "cost_per_1m_out_cached": 0.02,
2373      "context_window": 1000000,
2374      "default_max_tokens": 4096,
2375      "can_reason": false,
2376      "supports_attachments": false,
2377      "options": {}
2378    },
2379    {
2380      "id": "qwen/qwen-2.5-7b-instruct",
2381      "name": "Qwen: Qwen2.5 7B Instruct",
2382      "cost_per_1m_in": 0.07,
2383      "cost_per_1m_out": 0.07,
2384      "cost_per_1m_in_cached": 0,
2385      "cost_per_1m_out_cached": 0,
2386      "context_window": 32000,
2387      "default_max_tokens": 16000,
2388      "can_reason": false,
2389      "supports_attachments": false,
2390      "options": {}
2391    },
2392    {
2393      "id": "qwen/qwen3-14b",
2394      "name": "Qwen: Qwen3 14B",
2395      "cost_per_1m_in": 0.05,
2396      "cost_per_1m_out": 0.22,
2397      "cost_per_1m_in_cached": 0,
2398      "cost_per_1m_out_cached": 0,
2399      "context_window": 40960,
2400      "default_max_tokens": 20480,
2401      "can_reason": true,
2402      "reasoning_levels": [
2403        "low",
2404        "medium",
2405        "high"
2406      ],
2407      "default_reasoning_effort": "medium",
2408      "supports_attachments": false,
2409      "options": {}
2410    },
2411    {
2412      "id": "qwen/qwen3-235b-a22b",
2413      "name": "Qwen: Qwen3 235B A22B",
2414      "cost_per_1m_in": 0.22,
2415      "cost_per_1m_out": 0.88,
2416      "cost_per_1m_in_cached": 0,
2417      "cost_per_1m_out_cached": 0,
2418      "context_window": 131072,
2419      "default_max_tokens": 8192,
2420      "can_reason": true,
2421      "reasoning_levels": [
2422        "low",
2423        "medium",
2424        "high"
2425      ],
2426      "default_reasoning_effort": "medium",
2427      "supports_attachments": false,
2428      "options": {}
2429    },
2430    {
2431      "id": "qwen/qwen3-235b-a22b:free",
2432      "name": "Qwen: Qwen3 235B A22B (free)",
2433      "cost_per_1m_in": 0,
2434      "cost_per_1m_out": 0,
2435      "cost_per_1m_in_cached": 0,
2436      "cost_per_1m_out_cached": 0,
2437      "context_window": 131072,
2438      "default_max_tokens": 13107,
2439      "can_reason": true,
2440      "reasoning_levels": [
2441        "low",
2442        "medium",
2443        "high"
2444      ],
2445      "default_reasoning_effort": "medium",
2446      "supports_attachments": false,
2447      "options": {}
2448    },
2449    {
2450      "id": "qwen/qwen3-235b-a22b-2507",
2451      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2452      "cost_per_1m_in": 0.2,
2453      "cost_per_1m_out": 0.6,
2454      "cost_per_1m_in_cached": 0,
2455      "cost_per_1m_out_cached": 0,
2456      "context_window": 262144,
2457      "default_max_tokens": 26214,
2458      "can_reason": false,
2459      "supports_attachments": false,
2460      "options": {}
2461    },
2462    {
2463      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2464      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2465      "cost_per_1m_in": 0.11,
2466      "cost_per_1m_out": 0.6,
2467      "cost_per_1m_in_cached": 0,
2468      "cost_per_1m_out_cached": 0,
2469      "context_window": 262144,
2470      "default_max_tokens": 131072,
2471      "can_reason": true,
2472      "reasoning_levels": [
2473        "low",
2474        "medium",
2475        "high"
2476      ],
2477      "default_reasoning_effort": "medium",
2478      "supports_attachments": false,
2479      "options": {}
2480    },
2481    {
2482      "id": "qwen/qwen3-30b-a3b",
2483      "name": "Qwen: Qwen3 30B A3B",
2484      "cost_per_1m_in": 0.08,
2485      "cost_per_1m_out": 0.28,
2486      "cost_per_1m_in_cached": 0,
2487      "cost_per_1m_out_cached": 0,
2488      "context_window": 131072,
2489      "default_max_tokens": 65536,
2490      "can_reason": true,
2491      "reasoning_levels": [
2492        "low",
2493        "medium",
2494        "high"
2495      ],
2496      "default_reasoning_effort": "medium",
2497      "supports_attachments": false,
2498      "options": {}
2499    },
2500    {
2501      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2502      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2503      "cost_per_1m_in": 0.08,
2504      "cost_per_1m_out": 0.33,
2505      "cost_per_1m_in_cached": 0,
2506      "cost_per_1m_out_cached": 0,
2507      "context_window": 262144,
2508      "default_max_tokens": 131072,
2509      "can_reason": false,
2510      "supports_attachments": false,
2511      "options": {}
2512    },
2513    {
2514      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2515      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2516      "cost_per_1m_in": 0.09,
2517      "cost_per_1m_out": 0.3,
2518      "cost_per_1m_in_cached": 0,
2519      "cost_per_1m_out_cached": 0,
2520      "context_window": 262144,
2521      "default_max_tokens": 65536,
2522      "can_reason": true,
2523      "reasoning_levels": [
2524        "low",
2525        "medium",
2526        "high"
2527      ],
2528      "default_reasoning_effort": "medium",
2529      "supports_attachments": false,
2530      "options": {}
2531    },
2532    {
2533      "id": "qwen/qwen3-32b",
2534      "name": "Qwen: Qwen3 32B",
2535      "cost_per_1m_in": 0.15,
2536      "cost_per_1m_out": 0.5,
2537      "cost_per_1m_in_cached": 0,
2538      "cost_per_1m_out_cached": 0,
2539      "context_window": 131072,
2540      "default_max_tokens": 4000,
2541      "can_reason": true,
2542      "reasoning_levels": [
2543        "low",
2544        "medium",
2545        "high"
2546      ],
2547      "default_reasoning_effort": "medium",
2548      "supports_attachments": false,
2549      "options": {}
2550    },
2551    {
2552      "id": "qwen/qwen3-4b:free",
2553      "name": "Qwen: Qwen3 4B (free)",
2554      "cost_per_1m_in": 0,
2555      "cost_per_1m_out": 0,
2556      "cost_per_1m_in_cached": 0,
2557      "cost_per_1m_out_cached": 0,
2558      "context_window": 40960,
2559      "default_max_tokens": 4096,
2560      "can_reason": true,
2561      "reasoning_levels": [
2562        "low",
2563        "medium",
2564        "high"
2565      ],
2566      "default_reasoning_effort": "medium",
2567      "supports_attachments": false,
2568      "options": {}
2569    },
2570    {
2571      "id": "qwen/qwen3-8b",
2572      "name": "Qwen: Qwen3 8B",
2573      "cost_per_1m_in": 0.2,
2574      "cost_per_1m_out": 0.2,
2575      "cost_per_1m_in_cached": 0,
2576      "cost_per_1m_out_cached": 0,
2577      "context_window": 40960,
2578      "default_max_tokens": 2560,
2579      "can_reason": true,
2580      "reasoning_levels": [
2581        "low",
2582        "medium",
2583        "high"
2584      ],
2585      "default_reasoning_effort": "medium",
2586      "supports_attachments": false,
2587      "options": {}
2588    },
2589    {
2590      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2591      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2592      "cost_per_1m_in": 0.06,
2593      "cost_per_1m_out": 0.25,
2594      "cost_per_1m_in_cached": 0,
2595      "cost_per_1m_out_cached": 0,
2596      "context_window": 262144,
2597      "default_max_tokens": 131072,
2598      "can_reason": false,
2599      "supports_attachments": false,
2600      "options": {}
2601    },
2602    {
2603      "id": "qwen/qwen3-coder",
2604      "name": "Qwen: Qwen3 Coder 480B A35B",
2605      "cost_per_1m_in": 0.22,
2606      "cost_per_1m_out": 0.95,
2607      "cost_per_1m_in_cached": 0,
2608      "cost_per_1m_out_cached": 0,
2609      "context_window": 262144,
2610      "default_max_tokens": 131072,
2611      "can_reason": false,
2612      "supports_attachments": false,
2613      "options": {}
2614    },
2615    {
2616      "id": "qwen/qwen3-coder:exacto",
2617      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2618      "cost_per_1m_in": 0.38,
2619      "cost_per_1m_out": 1.53,
2620      "cost_per_1m_in_cached": 0,
2621      "cost_per_1m_out_cached": 0,
2622      "context_window": 262144,
2623      "default_max_tokens": 131072,
2624      "can_reason": true,
2625      "reasoning_levels": [
2626        "low",
2627        "medium",
2628        "high"
2629      ],
2630      "default_reasoning_effort": "medium",
2631      "supports_attachments": false,
2632      "options": {}
2633    },
2634    {
2635      "id": "qwen/qwen3-coder:free",
2636      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2637      "cost_per_1m_in": 0,
2638      "cost_per_1m_out": 0,
2639      "cost_per_1m_in_cached": 0,
2640      "cost_per_1m_out_cached": 0,
2641      "context_window": 262144,
2642      "default_max_tokens": 26214,
2643      "can_reason": false,
2644      "supports_attachments": false,
2645      "options": {}
2646    },
2647    {
2648      "id": "qwen/qwen3-coder-flash",
2649      "name": "Qwen: Qwen3 Coder Flash",
2650      "cost_per_1m_in": 0.3,
2651      "cost_per_1m_out": 1.5,
2652      "cost_per_1m_in_cached": 0,
2653      "cost_per_1m_out_cached": 0.08,
2654      "context_window": 128000,
2655      "default_max_tokens": 32768,
2656      "can_reason": false,
2657      "supports_attachments": false,
2658      "options": {}
2659    },
2660    {
2661      "id": "qwen/qwen3-coder-plus",
2662      "name": "Qwen: Qwen3 Coder Plus",
2663      "cost_per_1m_in": 1,
2664      "cost_per_1m_out": 5,
2665      "cost_per_1m_in_cached": 0,
2666      "cost_per_1m_out_cached": 0.1,
2667      "context_window": 128000,
2668      "default_max_tokens": 32768,
2669      "can_reason": false,
2670      "supports_attachments": false,
2671      "options": {}
2672    },
2673    {
2674      "id": "qwen/qwen3-max",
2675      "name": "Qwen: Qwen3 Max",
2676      "cost_per_1m_in": 1.2,
2677      "cost_per_1m_out": 6,
2678      "cost_per_1m_in_cached": 0,
2679      "cost_per_1m_out_cached": 0.24,
2680      "context_window": 256000,
2681      "default_max_tokens": 16384,
2682      "can_reason": false,
2683      "supports_attachments": false,
2684      "options": {}
2685    },
2686    {
2687      "id": "qwen/qwen3-next-80b-a3b-instruct",
2688      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2689      "cost_per_1m_in": 0.1,
2690      "cost_per_1m_out": 0.8,
2691      "cost_per_1m_in_cached": 0,
2692      "cost_per_1m_out_cached": 0,
2693      "context_window": 262144,
2694      "default_max_tokens": 131072,
2695      "can_reason": false,
2696      "supports_attachments": false,
2697      "options": {}
2698    },
2699    {
2700      "id": "qwen/qwen3-next-80b-a3b-thinking",
2701      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2702      "cost_per_1m_in": 0.3,
2703      "cost_per_1m_out": 0.3,
2704      "cost_per_1m_in_cached": 0,
2705      "cost_per_1m_out_cached": 0,
2706      "context_window": 262144,
2707      "default_max_tokens": 131072,
2708      "can_reason": true,
2709      "reasoning_levels": [
2710        "low",
2711        "medium",
2712        "high"
2713      ],
2714      "default_reasoning_effort": "medium",
2715      "supports_attachments": false,
2716      "options": {}
2717    },
2718    {
2719      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2720      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2721      "cost_per_1m_in": 0.7,
2722      "cost_per_1m_out": 2.8,
2723      "cost_per_1m_in_cached": 0,
2724      "cost_per_1m_out_cached": 0,
2725      "context_window": 131072,
2726      "default_max_tokens": 16384,
2727      "can_reason": false,
2728      "supports_attachments": true,
2729      "options": {}
2730    },
2731    {
2732      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2733      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2734      "cost_per_1m_in": 0.3,
2735      "cost_per_1m_out": 1.2,
2736      "cost_per_1m_in_cached": 0,
2737      "cost_per_1m_out_cached": 0,
2738      "context_window": 262144,
2739      "default_max_tokens": 131072,
2740      "can_reason": true,
2741      "reasoning_levels": [
2742        "low",
2743        "medium",
2744        "high"
2745      ],
2746      "default_reasoning_effort": "medium",
2747      "supports_attachments": true,
2748      "options": {}
2749    },
2750    {
2751      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2752      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2753      "cost_per_1m_in": 0.15,
2754      "cost_per_1m_out": 0.6,
2755      "cost_per_1m_in_cached": 0,
2756      "cost_per_1m_out_cached": 0,
2757      "context_window": 262144,
2758      "default_max_tokens": 16384,
2759      "can_reason": false,
2760      "supports_attachments": true,
2761      "options": {}
2762    },
2763    {
2764      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2765      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2766      "cost_per_1m_in": 0.2,
2767      "cost_per_1m_out": 1,
2768      "cost_per_1m_in_cached": 0,
2769      "cost_per_1m_out_cached": 0,
2770      "context_window": 131072,
2771      "default_max_tokens": 16384,
2772      "can_reason": true,
2773      "reasoning_levels": [
2774        "low",
2775        "medium",
2776        "high"
2777      ],
2778      "default_reasoning_effort": "medium",
2779      "supports_attachments": true,
2780      "options": {}
2781    },
2782    {
2783      "id": "qwen/qwen3-vl-8b-instruct",
2784      "name": "Qwen: Qwen3 VL 8B Instruct",
2785      "cost_per_1m_in": 0.18,
2786      "cost_per_1m_out": 0.7,
2787      "cost_per_1m_in_cached": 0,
2788      "cost_per_1m_out_cached": 0,
2789      "context_window": 256000,
2790      "default_max_tokens": 16384,
2791      "can_reason": false,
2792      "supports_attachments": true,
2793      "options": {}
2794    },
2795    {
2796      "id": "qwen/qwen3-vl-8b-thinking",
2797      "name": "Qwen: Qwen3 VL 8B Thinking",
2798      "cost_per_1m_in": 0.18,
2799      "cost_per_1m_out": 2.1,
2800      "cost_per_1m_in_cached": 0,
2801      "cost_per_1m_out_cached": 0,
2802      "context_window": 256000,
2803      "default_max_tokens": 16384,
2804      "can_reason": true,
2805      "reasoning_levels": [
2806        "low",
2807        "medium",
2808        "high"
2809      ],
2810      "default_reasoning_effort": "medium",
2811      "supports_attachments": true,
2812      "options": {}
2813    },
2814    {
2815      "id": "stepfun-ai/step3",
2816      "name": "StepFun: Step3",
2817      "cost_per_1m_in": 0.57,
2818      "cost_per_1m_out": 1.42,
2819      "cost_per_1m_in_cached": 0,
2820      "cost_per_1m_out_cached": 0,
2821      "context_window": 65536,
2822      "default_max_tokens": 32768,
2823      "can_reason": true,
2824      "reasoning_levels": [
2825        "low",
2826        "medium",
2827        "high"
2828      ],
2829      "default_reasoning_effort": "medium",
2830      "supports_attachments": true,
2831      "options": {}
2832    },
2833    {
2834      "id": "tngtech/deepseek-r1t2-chimera",
2835      "name": "TNG: DeepSeek R1T2 Chimera",
2836      "cost_per_1m_in": 0.3,
2837      "cost_per_1m_out": 1.2,
2838      "cost_per_1m_in_cached": 0,
2839      "cost_per_1m_out_cached": 0,
2840      "context_window": 163840,
2841      "default_max_tokens": 81920,
2842      "can_reason": true,
2843      "reasoning_levels": [
2844        "low",
2845        "medium",
2846        "high"
2847      ],
2848      "default_reasoning_effort": "medium",
2849      "supports_attachments": false,
2850      "options": {}
2851    },
2852    {
2853      "id": "thedrummer/rocinante-12b",
2854      "name": "TheDrummer: Rocinante 12B",
2855      "cost_per_1m_in": 0.17,
2856      "cost_per_1m_out": 0.43,
2857      "cost_per_1m_in_cached": 0,
2858      "cost_per_1m_out_cached": 0,
2859      "context_window": 32768,
2860      "default_max_tokens": 3276,
2861      "can_reason": false,
2862      "supports_attachments": false,
2863      "options": {}
2864    },
2865    {
2866      "id": "thedrummer/unslopnemo-12b",
2867      "name": "TheDrummer: UnslopNemo 12B",
2868      "cost_per_1m_in": 0.4,
2869      "cost_per_1m_out": 0.4,
2870      "cost_per_1m_in_cached": 0,
2871      "cost_per_1m_out_cached": 0,
2872      "context_window": 32768,
2873      "default_max_tokens": 3276,
2874      "can_reason": false,
2875      "supports_attachments": false,
2876      "options": {}
2877    },
2878    {
2879      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2880      "name": "Tongyi DeepResearch 30B A3B",
2881      "cost_per_1m_in": 0.09,
2882      "cost_per_1m_out": 0.45,
2883      "cost_per_1m_in_cached": 0,
2884      "cost_per_1m_out_cached": 0,
2885      "context_window": 131072,
2886      "default_max_tokens": 65536,
2887      "can_reason": true,
2888      "reasoning_levels": [
2889        "low",
2890        "medium",
2891        "high"
2892      ],
2893      "default_reasoning_effort": "medium",
2894      "supports_attachments": false,
2895      "options": {}
2896    },
2897    {
2898      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2899      "name": "Tongyi DeepResearch 30B A3B (free)",
2900      "cost_per_1m_in": 0,
2901      "cost_per_1m_out": 0,
2902      "cost_per_1m_in_cached": 0,
2903      "cost_per_1m_out_cached": 0,
2904      "context_window": 131072,
2905      "default_max_tokens": 65536,
2906      "can_reason": true,
2907      "reasoning_levels": [
2908        "low",
2909        "medium",
2910        "high"
2911      ],
2912      "default_reasoning_effort": "medium",
2913      "supports_attachments": false,
2914      "options": {}
2915    },
2916    {
2917      "id": "z-ai/glm-4-32b",
2918      "name": "Z.AI: GLM 4 32B",
2919      "cost_per_1m_in": 0.1,
2920      "cost_per_1m_out": 0.1,
2921      "cost_per_1m_in_cached": 0,
2922      "cost_per_1m_out_cached": 0,
2923      "context_window": 128000,
2924      "default_max_tokens": 12800,
2925      "can_reason": false,
2926      "supports_attachments": false,
2927      "options": {}
2928    },
2929    {
2930      "id": "z-ai/glm-4.5",
2931      "name": "Z.AI: GLM 4.5",
2932      "cost_per_1m_in": 0.35,
2933      "cost_per_1m_out": 1.55,
2934      "cost_per_1m_in_cached": 0,
2935      "cost_per_1m_out_cached": 0,
2936      "context_window": 131072,
2937      "default_max_tokens": 65536,
2938      "can_reason": true,
2939      "reasoning_levels": [
2940        "low",
2941        "medium",
2942        "high"
2943      ],
2944      "default_reasoning_effort": "medium",
2945      "supports_attachments": false,
2946      "options": {}
2947    },
2948    {
2949      "id": "z-ai/glm-4.5-air",
2950      "name": "Z.AI: GLM 4.5 Air",
2951      "cost_per_1m_in": 0.14,
2952      "cost_per_1m_out": 0.86,
2953      "cost_per_1m_in_cached": 0,
2954      "cost_per_1m_out_cached": 0,
2955      "context_window": 131072,
2956      "default_max_tokens": 65536,
2957      "can_reason": true,
2958      "reasoning_levels": [
2959        "low",
2960        "medium",
2961        "high"
2962      ],
2963      "default_reasoning_effort": "medium",
2964      "supports_attachments": false,
2965      "options": {}
2966    },
2967    {
2968      "id": "z-ai/glm-4.5-air:free",
2969      "name": "Z.AI: GLM 4.5 Air (free)",
2970      "cost_per_1m_in": 0,
2971      "cost_per_1m_out": 0,
2972      "cost_per_1m_in_cached": 0,
2973      "cost_per_1m_out_cached": 0,
2974      "context_window": 131072,
2975      "default_max_tokens": 48000,
2976      "can_reason": true,
2977      "reasoning_levels": [
2978        "low",
2979        "medium",
2980        "high"
2981      ],
2982      "default_reasoning_effort": "medium",
2983      "supports_attachments": false,
2984      "options": {}
2985    },
2986    {
2987      "id": "z-ai/glm-4.5v",
2988      "name": "Z.AI: GLM 4.5V",
2989      "cost_per_1m_in": 0.6,
2990      "cost_per_1m_out": 1.8,
2991      "cost_per_1m_in_cached": 0,
2992      "cost_per_1m_out_cached": 0.11,
2993      "context_window": 65536,
2994      "default_max_tokens": 8192,
2995      "can_reason": true,
2996      "reasoning_levels": [
2997        "low",
2998        "medium",
2999        "high"
3000      ],
3001      "default_reasoning_effort": "medium",
3002      "supports_attachments": true,
3003      "options": {}
3004    },
3005    {
3006      "id": "z-ai/glm-4.6",
3007      "name": "Z.AI: GLM 4.6",
3008      "cost_per_1m_in": 0.6,
3009      "cost_per_1m_out": 2.2,
3010      "cost_per_1m_in_cached": 0,
3011      "cost_per_1m_out_cached": 0.11,
3012      "context_window": 204800,
3013      "default_max_tokens": 65536,
3014      "can_reason": true,
3015      "reasoning_levels": [
3016        "low",
3017        "medium",
3018        "high"
3019      ],
3020      "default_reasoning_effort": "medium",
3021      "supports_attachments": false,
3022      "options": {}
3023    },
3024    {
3025      "id": "z-ai/glm-4.6:exacto",
3026      "name": "Z.AI: GLM 4.6 (exacto)",
3027      "cost_per_1m_in": 0.6,
3028      "cost_per_1m_out": 2.2,
3029      "cost_per_1m_in_cached": 0,
3030      "cost_per_1m_out_cached": 0,
3031      "context_window": 204800,
3032      "default_max_tokens": 65536,
3033      "can_reason": true,
3034      "reasoning_levels": [
3035        "low",
3036        "medium",
3037        "high"
3038      ],
3039      "default_reasoning_effort": "medium",
3040      "supports_attachments": false,
3041      "options": {}
3042    },
3043    {
3044      "id": "inclusionai/ling-1t",
3045      "name": "inclusionAI: Ling-1T",
3046      "cost_per_1m_in": 0.57,
3047      "cost_per_1m_out": 2.28,
3048      "cost_per_1m_in_cached": 0,
3049      "cost_per_1m_out_cached": 0,
3050      "context_window": 131072,
3051      "default_max_tokens": 65536,
3052      "can_reason": false,
3053      "supports_attachments": false,
3054      "options": {}
3055    },
3056    {
3057      "id": "inclusionai/ring-1t",
3058      "name": "inclusionAI: Ring 1T",
3059      "cost_per_1m_in": 0.57,
3060      "cost_per_1m_out": 2.28,
3061      "cost_per_1m_in_cached": 0,
3062      "cost_per_1m_out_cached": 0,
3063      "context_window": 131072,
3064      "default_max_tokens": 65536,
3065      "can_reason": true,
3066      "reasoning_levels": [
3067        "low",
3068        "medium",
3069        "high"
3070      ],
3071      "default_reasoning_effort": "medium",
3072      "supports_attachments": false,
3073      "options": {}
3074    },
3075    {
3076      "id": "x-ai/grok-3",
3077      "name": "xAI: Grok 3",
3078      "cost_per_1m_in": 5,
3079      "cost_per_1m_out": 25,
3080      "cost_per_1m_in_cached": 0,
3081      "cost_per_1m_out_cached": 1.25,
3082      "context_window": 131072,
3083      "default_max_tokens": 13107,
3084      "can_reason": false,
3085      "supports_attachments": false,
3086      "options": {}
3087    },
3088    {
3089      "id": "x-ai/grok-3-beta",
3090      "name": "xAI: Grok 3 Beta",
3091      "cost_per_1m_in": 5,
3092      "cost_per_1m_out": 25,
3093      "cost_per_1m_in_cached": 0,
3094      "cost_per_1m_out_cached": 1.25,
3095      "context_window": 131072,
3096      "default_max_tokens": 13107,
3097      "can_reason": false,
3098      "supports_attachments": false,
3099      "options": {}
3100    },
3101    {
3102      "id": "x-ai/grok-3-mini",
3103      "name": "xAI: Grok 3 Mini",
3104      "cost_per_1m_in": 0.3,
3105      "cost_per_1m_out": 0.5,
3106      "cost_per_1m_in_cached": 0,
3107      "cost_per_1m_out_cached": 0.075,
3108      "context_window": 131072,
3109      "default_max_tokens": 13107,
3110      "can_reason": true,
3111      "reasoning_levels": [
3112        "low",
3113        "medium",
3114        "high"
3115      ],
3116      "default_reasoning_effort": "medium",
3117      "supports_attachments": false,
3118      "options": {}
3119    },
3120    {
3121      "id": "x-ai/grok-3-mini-beta",
3122      "name": "xAI: Grok 3 Mini Beta",
3123      "cost_per_1m_in": 0.3,
3124      "cost_per_1m_out": 0.5,
3125      "cost_per_1m_in_cached": 0,
3126      "cost_per_1m_out_cached": 0.075,
3127      "context_window": 131072,
3128      "default_max_tokens": 13107,
3129      "can_reason": true,
3130      "reasoning_levels": [
3131        "low",
3132        "medium",
3133        "high"
3134      ],
3135      "default_reasoning_effort": "medium",
3136      "supports_attachments": false,
3137      "options": {}
3138    },
3139    {
3140      "id": "x-ai/grok-4",
3141      "name": "xAI: Grok 4",
3142      "cost_per_1m_in": 3,
3143      "cost_per_1m_out": 15,
3144      "cost_per_1m_in_cached": 0,
3145      "cost_per_1m_out_cached": 0.75,
3146      "context_window": 256000,
3147      "default_max_tokens": 25600,
3148      "can_reason": true,
3149      "reasoning_levels": [
3150        "low",
3151        "medium",
3152        "high"
3153      ],
3154      "default_reasoning_effort": "medium",
3155      "supports_attachments": true,
3156      "options": {}
3157    },
3158    {
3159      "id": "x-ai/grok-4-fast",
3160      "name": "xAI: Grok 4 Fast",
3161      "cost_per_1m_in": 0.2,
3162      "cost_per_1m_out": 0.5,
3163      "cost_per_1m_in_cached": 0,
3164      "cost_per_1m_out_cached": 0.05,
3165      "context_window": 2000000,
3166      "default_max_tokens": 15000,
3167      "can_reason": true,
3168      "reasoning_levels": [
3169        "low",
3170        "medium",
3171        "high"
3172      ],
3173      "default_reasoning_effort": "medium",
3174      "supports_attachments": true,
3175      "options": {}
3176    },
3177    {
3178      "id": "x-ai/grok-code-fast-1",
3179      "name": "xAI: Grok Code Fast 1",
3180      "cost_per_1m_in": 0.2,
3181      "cost_per_1m_out": 1.5,
3182      "cost_per_1m_in_cached": 0,
3183      "cost_per_1m_out_cached": 0.02,
3184      "context_window": 256000,
3185      "default_max_tokens": 5000,
3186      "can_reason": true,
3187      "reasoning_levels": [
3188        "low",
3189        "medium",
3190        "high"
3191      ],
3192      "default_reasoning_effort": "medium",
3193      "supports_attachments": false,
3194      "options": {}
3195    }
3196  ],
3197  "default_headers": {
3198    "HTTP-Referer": "https://charm.land",
3199    "X-Title": "Crush"
3200  }
3201}
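
A minimal consumption sketch (not part of Crush or OpenRouter; the filename `openrouter.json` and the chosen model id are assumptions for illustration): it loads the catalog above with Python's standard `json` module, indexes the entries by `id`, and estimates a single request's dollar cost from `cost_per_1m_in` and `cost_per_1m_out`.

```python
# Illustrative only: read the provider catalog and estimate request cost.
# Assumes the JSON above is saved as "openrouter.json" in the working directory.
import json

def estimate_cost(model: dict, prompt_tokens: int, completion_tokens: int) -> float:
    """Dollar cost of one request, using the per-1M-token prices."""
    cost_in = model["cost_per_1m_in"] * prompt_tokens / 1_000_000
    cost_out = model["cost_per_1m_out"] * completion_tokens / 1_000_000
    return cost_in + cost_out

with open("openrouter.json") as f:
    provider = json.load(f)

models = {m["id"]: m for m in provider["models"]}
entry = models["openai/gpt-5"]  # any id from the "models" array works
print(f'{entry["name"]}: ${estimate_cost(entry, 12_000, 2_000):.4f} per request')
```

The cached-token fields (`cost_per_1m_in_cached`, `cost_per_1m_out_cached`) could be folded in the same way when a provider reports cached token counts.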