openrouter.json

{
  "name": "OpenRouter",
  "id": "openrouter",
  "api_key": "$OPENROUTER_API_KEY",
  "api_endpoint": "https://openrouter.ai/api/v1",
  "type": "openrouter",
  "default_large_model_id": "anthropic/claude-sonnet-4",
  "default_small_model_id": "anthropic/claude-3.5-haiku",
  "models": [
    {
      "id": "ai21/jamba-large-1.7",
      "name": "AI21: Jamba Large 1.7",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "ai21/jamba-mini-1.7",
      "name": "AI21: Jamba Mini 1.7",
      "cost_per_1m_in": 0.19999999999999998,
      "cost_per_1m_out": 0.39999999999999997,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "allenai/olmo-3-7b-instruct",
      "name": "AllenAI: Olmo 3 7B Instruct",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.19999999999999998,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "amazon/nova-lite-v1",
      "name": "Amazon: Nova Lite 1.0",
      "cost_per_1m_in": 0.06,
      "cost_per_1m_out": 0.24,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-micro-v1",
      "name": "Amazon: Nova Micro 1.0",
      "cost_per_1m_in": 0.035,
      "cost_per_1m_out": 0.14,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "amazon/nova-premier-v1",
      "name": "Amazon: Nova Premier 1.0",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 12.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.625,
      "context_window": 1000000,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "amazon/nova-pro-v1",
      "name": "Amazon: Nova Pro 1.0",
      "cost_per_1m_in": 0.7999999999999999,
      "cost_per_1m_out": 3.1999999999999997,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3-haiku",
      "name": "Anthropic: Claude 3 Haiku",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1.25,
      "cost_per_1m_in_cached": 0.3,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3-opus",
      "name": "Anthropic: Claude 3 Opus",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-haiku",
      "name": "Anthropic: Claude 3.5 Haiku",
      "cost_per_1m_in": 0.7999999999999999,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-haiku-20241022",
      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
      "cost_per_1m_in": 0.7999999999999999,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.5-sonnet",
      "name": "Anthropic: Claude 3.5 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.7-sonnet",
      "name": "Anthropic: Claude 3.7 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-3.7-sonnet:thinking",
      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-haiku-4.5",
      "name": "Anthropic: Claude Haiku 4.5",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 1.25,
      "cost_per_1m_out_cached": 0.09999999999999999,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4",
      "name": "Anthropic: Claude Opus 4",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4.1",
      "name": "Anthropic: Claude Opus 4.1",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-opus-4.5",
      "name": "Anthropic: Claude Opus 4.5",
      "cost_per_1m_in": 5,
      "cost_per_1m_out": 25,
      "cost_per_1m_in_cached": 6.25,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-sonnet-4",
      "name": "Anthropic: Claude Sonnet 4",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "anthropic/claude-sonnet-4.5",
      "name": "Anthropic: Claude Sonnet 4.5",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "arcee-ai/virtuoso-large",
      "name": "Arcee AI: Virtuoso Large",
      "cost_per_1m_in": 0.75,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "baidu/ernie-4.5-21b-a3b",
      "name": "Baidu: ERNIE 4.5 21B A3B",
      "cost_per_1m_in": 0.056,
      "cost_per_1m_out": 0.224,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 120000,
      "default_max_tokens": 4000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "baidu/ernie-4.5-vl-28b-a3b",
      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
      "cost_per_1m_in": 0.112,
      "cost_per_1m_out": 0.448,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 30000,
      "default_max_tokens": 4000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
      "name": "Cogito V2 Preview Llama 109B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.59,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32767,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "cohere/command-r-08-2024",
      "name": "Cohere: Command R (08-2024)",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "cohere/command-r-plus-08-2024",
      "name": "Cohere: Command R+ (08-2024)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-405b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
      "cost_per_1m_in": 3.5,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-70b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
      "cost_per_1m_in": 0.88,
      "cost_per_1m_out": 0.88,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat",
      "name": "DeepSeek: DeepSeek V3",
      "cost_per_1m_in": 0.32,
      "cost_per_1m_out": 1.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 64000,
      "default_max_tokens": 8000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3-0324",
      "name": "DeepSeek: DeepSeek V3 0324",
      "cost_per_1m_in": 0.216,
      "cost_per_1m_out": 0.896,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.135,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3-0324:free",
      "name": "DeepSeek: DeepSeek V3 0324 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-chat-v3.1",
      "name": "DeepSeek: DeepSeek V3.1",
      "cost_per_1m_in": 0.56,
      "cost_per_1m_out": 1.68,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus",
      "name": "DeepSeek: DeepSeek V3.1 Terminus",
      "cost_per_1m_in": 0.22999999999999998,
      "cost_per_1m_out": 0.8999999999999999,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus:exacto",
      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
      "cost_per_1m_in": 0.216,
      "cost_per_1m_out": 0.7999999999999999,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-v3.2-exp",
      "name": "DeepSeek: DeepSeek V3.2 Exp",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 0.39999999999999997,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1",
      "name": "DeepSeek: R1",
      "cost_per_1m_in": 0.7,
      "cost_per_1m_out": 2.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1-0528",
      "name": "DeepSeek: R1 0528",
      "cost_per_1m_in": 0.7999999999999999,
      "cost_per_1m_out": 2.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "deepseek/deepseek-r1-distill-llama-70b",
      "name": "DeepSeek: R1 Distill Llama 70B",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.13,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-001",
      "name": "Google: Gemini 2.0 Flash",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.39999999999999997,
      "cost_per_1m_in_cached": 0.18330000000000002,
      "cost_per_1m_out_cached": 0.024999999999999998,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-exp:free",
      "name": "Google: Gemini 2.0 Flash Experimental (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.0-flash-lite-001",
      "name": "Google: Gemini 2.0 Flash Lite",
      "cost_per_1m_in": 0.075,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash",
      "name": "Google: Gemini 2.5 Flash",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-lite",
      "name": "Google: Gemini 2.5 Flash Lite",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.39999999999999997,
      "cost_per_1m_in_cached": 0.18330000000000002,
      "cost_per_1m_out_cached": 0.024999999999999998,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.39999999999999997,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-flash-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro",
      "name": "Google: Gemini 2.5 Pro",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro-preview-05-06",
      "name": "Google: Gemini 2.5 Pro Preview 05-06",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-2.5-pro-preview",
      "name": "Google: Gemini 2.5 Pro Preview 06-05",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemini-3-pro-preview",
      "name": "Google: Gemini 3 Pro Preview",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 12,
      "cost_per_1m_in_cached": 2.375,
      "cost_per_1m_out_cached": 0.19999999999999998,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google/gemma-3-27b-it",
      "name": "Google: Gemma 3 27B",
      "cost_per_1m_in": 0.13,
      "cost_per_1m_out": 0.52,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 96000,
      "default_max_tokens": 48000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "inception/mercury",
      "name": "Inception: Mercury",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "inception/mercury-coder",
      "name": "Inception: Mercury Coder",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "kwaipilot/kat-coder-pro:free",
      "name": "Kwaipilot: KAT-Coder-Pro V1 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meituan/longcat-flash-chat:free",
      "name": "Meituan: LongCat Flash Chat (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-405b-instruct",
      "name": "Meta: Llama 3.1 405B Instruct",
      "cost_per_1m_in": 3.5,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 130815,
      "default_max_tokens": 13081,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-70b-instruct",
      "name": "Meta: Llama 3.1 70B Instruct",
      "cost_per_1m_in": 0.39999999999999997,
      "cost_per_1m_out": 0.39999999999999997,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.1-8b-instruct",
      "name": "Meta: Llama 3.1 8B Instruct",
      "cost_per_1m_in": 0.049999999999999996,
      "cost_per_1m_out": 0.08,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.2-3b-instruct",
      "name": "Meta: Llama 3.2 3B Instruct",
      "cost_per_1m_in": 0.024,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct",
      "name": "Meta: Llama 3.3 70B Instruct",
      "cost_per_1m_in": 0.59,
      "cost_per_1m_out": 0.7899999999999999,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 16384,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct:free",
      "name": "Meta: Llama 3.3 70B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "meta-llama/llama-4-maverick",
      "name": "Meta: Llama 4 Maverick",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 0.85,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "meta-llama/llama-4-scout",
      "name": "Meta: Llama 4 Scout",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.7,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1310720,
      "default_max_tokens": 4096,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "microsoft/phi-3-medium-128k-instruct",
      "name": "Microsoft: Phi-3 Medium 128K Instruct",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "microsoft/phi-3-mini-128k-instruct",
      "name": "Microsoft: Phi-3 Mini 128K Instruct",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.09999999999999999,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "microsoft/phi-3.5-mini-128k-instruct",
      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.09999999999999999,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "minimax/minimax-m2",
      "name": "MiniMax: MiniMax M2",
      "cost_per_1m_in": 0.24,
      "cost_per_1m_out": 0.96,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 204800,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large",
      "name": "Mistral Large",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2407",
      "name": "Mistral Large 2407",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-large-2411",
      "name": "Mistral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small",
      "name": "Mistral Small",
      "cost_per_1m_in": 0.19999999999999998,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-tiny",
      "name": "Mistral Tiny",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.25,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/codestral-2501",
      "name": "Mistral: Codestral 2501",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.8999999999999999,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/codestral-2508",
      "name": "Mistral: Codestral 2508",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.8999999999999999,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/devstral-medium",
      "name": "Mistral: Devstral Medium",
      "cost_per_1m_in": 0.39999999999999997,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/devstral-small",
      "name": "Mistral: Devstral Small 1.1",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-medium-2506",
      "name": "Mistral: Magistral Medium 2506",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-medium-2506:thinking",
      "name": "Mistral: Magistral Medium 2506 (thinking)",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/magistral-small-2506",
      "name": "Mistral: Magistral Small 2506",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40000,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/ministral-3b",
      "name": "Mistral: Ministral 3B",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.04,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/ministral-8b",
      "name": "Mistral: Ministral 8B",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.09999999999999999,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-7b-instruct",
      "name": "Mistral: Mistral 7B Instruct",
      "cost_per_1m_in": 0.028,
      "cost_per_1m_out": 0.054,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-7b-instruct:free",
      "name": "Mistral: Mistral 7B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-medium-3",
      "name": "Mistral: Mistral Medium 3",
      "cost_per_1m_in": 0.39999999999999997,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-medium-3.1",
      "name": "Mistral: Mistral Medium 3.1",
      "cost_per_1m_in": 0.39999999999999997,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-nemo",
      "name": "Mistral: Mistral Nemo",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-24b-instruct-2501",
      "name": "Mistral: Mistral Small 3",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct",
      "name": "Mistral: Mistral Small 3.1 24B",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
      "name": "Mistral: Mistral Small 3.1 24B (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 96000,
      "default_max_tokens": 48000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.2-24b-instruct",
      "name": "Mistral: Mistral Small 3.2 24B",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-small-3.2-24b-instruct:free",
      "name": "Mistral: Mistral Small 3.2 24B (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mixtral-8x22b-instruct",
      "name": "Mistral: Mixtral 8x22B Instruct",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 6553,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/mixtral-8x7b-instruct",
      "name": "Mistral: Mixtral 8x7B Instruct",
      "cost_per_1m_in": 0.54,
      "cost_per_1m_out": 0.54,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/pixtral-large-2411",
      "name": "Mistral: Pixtral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistralai/mistral-saba",
      "name": "Mistral: Saba",
      "cost_per_1m_in": 0.19999999999999998,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistralai/voxtral-small-24b-2507",
      "name": "Mistral: Voxtral Small 24B 2507",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32000,
      "default_max_tokens": 3200,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2",
      "name": "MoonshotAI: Kimi K2 0711",
      "cost_per_1m_in": 0.6,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.15,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-0905",
      "name": "MoonshotAI: Kimi K2 0905",
      "cost_per_1m_in": 0.39999999999999997,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 131072,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-0905:exacto",
      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
      "cost_per_1m_in": 0.6,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "moonshotai/kimi-k2-thinking",
      "name": "MoonshotAI: Kimi K2 Thinking",
      "cost_per_1m_in": 0.48,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.15,
      "context_window": 262144,
      "default_max_tokens": 131072,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
      "cost_per_1m_in": 1.2,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.39999999999999997,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2",
      "name": "NVIDIA: Nemotron Nano 9B V2",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.16,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2:free",
      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/deephermes-3-mistral-24b-preview",
      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.59,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nousresearch/hermes-4-405b",
      "name": "Nous: Hermes 4 405B",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/codex-mini",
      "name": "OpenAI: Codex Mini",
      "cost_per_1m_in": 1.5,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.375,
      "context_window": 200000,
      "default_max_tokens": 50000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4-turbo",
      "name": "OpenAI: GPT-4 Turbo",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4-1106-preview",
      "name": "OpenAI: GPT-4 Turbo (older v1106)",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4-turbo-preview",
      "name": "OpenAI: GPT-4 Turbo Preview",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1",
      "name": "OpenAI: GPT-4.1",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1-mini",
      "name": "OpenAI: GPT-4.1 Mini",
      "cost_per_1m_in": 0.39999999999999997,
      "cost_per_1m_out": 1.5999999999999999,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.09999999999999999,
      "context_window": 1047576,
      "default_max_tokens": 16384,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4.1-nano",
      "name": "OpenAI: GPT-4.1 Nano",
      "cost_per_1m_in": 0.09999999999999999,
      "cost_per_1m_out": 0.39999999999999997,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.024999999999999998,
      "context_window": 1047576,
      "default_max_tokens": 16384,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o",
      "name": "OpenAI: GPT-4o",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai/gpt-4o-2024-05-13",
      "name": "OpenAI: GPT-4o (2024-05-13)",
      "cost_per_1m_in": 5,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
1739    {
1740      "id": "openai/gpt-4o-2024-08-06",
1741      "name": "OpenAI: GPT-4o (2024-08-06)",
1742      "cost_per_1m_in": 2.5,
1743      "cost_per_1m_out": 10,
1744      "cost_per_1m_in_cached": 0,
1745      "cost_per_1m_out_cached": 1.25,
1746      "context_window": 128000,
1747      "default_max_tokens": 8192,
1748      "can_reason": false,
1749      "supports_attachments": true,
1750      "options": {}
1751    },
1752    {
1753      "id": "openai/gpt-4o-2024-11-20",
1754      "name": "OpenAI: GPT-4o (2024-11-20)",
1755      "cost_per_1m_in": 2.5,
1756      "cost_per_1m_out": 10,
1757      "cost_per_1m_in_cached": 0,
1758      "cost_per_1m_out_cached": 1.25,
1759      "context_window": 128000,
1760      "default_max_tokens": 8192,
1761      "can_reason": false,
1762      "supports_attachments": true,
1763      "options": {}
1764    },
1765    {
1766      "id": "openai/gpt-4o:extended",
1767      "name": "OpenAI: GPT-4o (extended)",
1768      "cost_per_1m_in": 6,
1769      "cost_per_1m_out": 18,
1770      "cost_per_1m_in_cached": 0,
1771      "cost_per_1m_out_cached": 0,
1772      "context_window": 128000,
1773      "default_max_tokens": 32000,
1774      "can_reason": false,
1775      "supports_attachments": true,
1776      "options": {}
1777    },
1778    {
1779      "id": "openai/gpt-4o-audio-preview",
1780      "name": "OpenAI: GPT-4o Audio",
1781      "cost_per_1m_in": 2.5,
1782      "cost_per_1m_out": 10,
1783      "cost_per_1m_in_cached": 0,
1784      "cost_per_1m_out_cached": 0,
1785      "context_window": 128000,
1786      "default_max_tokens": 8192,
1787      "can_reason": false,
1788      "supports_attachments": false,
1789      "options": {}
1790    },
1791    {
1792      "id": "openai/gpt-4o-mini",
1793      "name": "OpenAI: GPT-4o-mini",
1794      "cost_per_1m_in": 0.15,
1795      "cost_per_1m_out": 0.6,
1796      "cost_per_1m_in_cached": 0,
1797      "cost_per_1m_out_cached": 0.075,
1798      "context_window": 128000,
1799      "default_max_tokens": 8192,
1800      "can_reason": false,
1801      "supports_attachments": true,
1802      "options": {}
1803    },
1804    {
1805      "id": "openai/gpt-4o-mini-2024-07-18",
1806      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1807      "cost_per_1m_in": 0.15,
1808      "cost_per_1m_out": 0.6,
1809      "cost_per_1m_in_cached": 0,
1810      "cost_per_1m_out_cached": 0.075,
1811      "context_window": 128000,
1812      "default_max_tokens": 8192,
1813      "can_reason": false,
1814      "supports_attachments": true,
1815      "options": {}
1816    },
1817    {
1818      "id": "openai/gpt-5",
1819      "name": "OpenAI: GPT-5",
1820      "cost_per_1m_in": 1.25,
1821      "cost_per_1m_out": 10,
1822      "cost_per_1m_in_cached": 0,
1823      "cost_per_1m_out_cached": 0.125,
1824      "context_window": 400000,
1825      "default_max_tokens": 64000,
1826      "can_reason": true,
1827      "reasoning_levels": [
1828        "low",
1829        "medium",
1830        "high"
1831      ],
1832      "default_reasoning_effort": "medium",
1833      "supports_attachments": true,
1834      "options": {}
1835    },
1836    {
1837      "id": "openai/gpt-5-codex",
1838      "name": "OpenAI: GPT-5 Codex",
1839      "cost_per_1m_in": 1.25,
1840      "cost_per_1m_out": 10,
1841      "cost_per_1m_in_cached": 0,
1842      "cost_per_1m_out_cached": 0.125,
1843      "context_window": 400000,
1844      "default_max_tokens": 64000,
1845      "can_reason": true,
1846      "reasoning_levels": [
1847        "low",
1848        "medium",
1849        "high"
1850      ],
1851      "default_reasoning_effort": "medium",
1852      "supports_attachments": true,
1853      "options": {}
1854    },
1855    {
1856      "id": "openai/gpt-5-image",
1857      "name": "OpenAI: GPT-5 Image",
1858      "cost_per_1m_in": 10,
1859      "cost_per_1m_out": 10,
1860      "cost_per_1m_in_cached": 0,
1861      "cost_per_1m_out_cached": 1.25,
1862      "context_window": 400000,
1863      "default_max_tokens": 64000,
1864      "can_reason": true,
1865      "reasoning_levels": [
1866        "low",
1867        "medium",
1868        "high"
1869      ],
1870      "default_reasoning_effort": "medium",
1871      "supports_attachments": true,
1872      "options": {}
1873    },
1874    {
1875      "id": "openai/gpt-5-image-mini",
1876      "name": "OpenAI: GPT-5 Image Mini",
1877      "cost_per_1m_in": 2.5,
1878      "cost_per_1m_out": 2,
1879      "cost_per_1m_in_cached": 0,
1880      "cost_per_1m_out_cached": 0.25,
1881      "context_window": 400000,
1882      "default_max_tokens": 64000,
1883      "can_reason": true,
1884      "reasoning_levels": [
1885        "low",
1886        "medium",
1887        "high"
1888      ],
1889      "default_reasoning_effort": "medium",
1890      "supports_attachments": true,
1891      "options": {}
1892    },
1893    {
1894      "id": "openai/gpt-5-mini",
1895      "name": "OpenAI: GPT-5 Mini",
1896      "cost_per_1m_in": 0.25,
1897      "cost_per_1m_out": 2,
1898      "cost_per_1m_in_cached": 0,
1899      "cost_per_1m_out_cached": 0.025,
1900      "context_window": 400000,
1901      "default_max_tokens": 64000,
1902      "can_reason": true,
1903      "reasoning_levels": [
1904        "low",
1905        "medium",
1906        "high"
1907      ],
1908      "default_reasoning_effort": "medium",
1909      "supports_attachments": true,
1910      "options": {}
1911    },
1912    {
1913      "id": "openai/gpt-5-nano",
1914      "name": "OpenAI: GPT-5 Nano",
1915      "cost_per_1m_in": 0.05,
1916      "cost_per_1m_out": 0.4,
1917      "cost_per_1m_in_cached": 0,
1918      "cost_per_1m_out_cached": 0.01,
1919      "context_window": 400000,
1920      "default_max_tokens": 40000,
1921      "can_reason": true,
1922      "reasoning_levels": [
1923        "low",
1924        "medium",
1925        "high"
1926      ],
1927      "default_reasoning_effort": "medium",
1928      "supports_attachments": true,
1929      "options": {}
1930    },
1931    {
1932      "id": "openai/gpt-5-pro",
1933      "name": "OpenAI: GPT-5 Pro",
1934      "cost_per_1m_in": 15,
1935      "cost_per_1m_out": 120,
1936      "cost_per_1m_in_cached": 0,
1937      "cost_per_1m_out_cached": 0,
1938      "context_window": 400000,
1939      "default_max_tokens": 64000,
1940      "can_reason": true,
1941      "reasoning_levels": [
1942        "low",
1943        "medium",
1944        "high"
1945      ],
1946      "default_reasoning_effort": "medium",
1947      "supports_attachments": true,
1948      "options": {}
1949    },
1950    {
1951      "id": "openai/gpt-5.1",
1952      "name": "OpenAI: GPT-5.1",
1953      "cost_per_1m_in": 1.25,
1954      "cost_per_1m_out": 10,
1955      "cost_per_1m_in_cached": 0,
1956      "cost_per_1m_out_cached": 0.125,
1957      "context_window": 400000,
1958      "default_max_tokens": 64000,
1959      "can_reason": true,
1960      "reasoning_levels": [
1961        "low",
1962        "medium",
1963        "high"
1964      ],
1965      "default_reasoning_effort": "medium",
1966      "supports_attachments": true,
1967      "options": {}
1968    },
1969    {
1970      "id": "openai/gpt-5.1-chat",
1971      "name": "OpenAI: GPT-5.1 Chat",
1972      "cost_per_1m_in": 1.25,
1973      "cost_per_1m_out": 10,
1974      "cost_per_1m_in_cached": 0,
1975      "cost_per_1m_out_cached": 0.125,
1976      "context_window": 128000,
1977      "default_max_tokens": 8192,
1978      "can_reason": false,
1979      "supports_attachments": true,
1980      "options": {}
1981    },
1982    {
1983      "id": "openai/gpt-5.1-codex",
1984      "name": "OpenAI: GPT-5.1-Codex",
1985      "cost_per_1m_in": 1.25,
1986      "cost_per_1m_out": 10,
1987      "cost_per_1m_in_cached": 0,
1988      "cost_per_1m_out_cached": 0.125,
1989      "context_window": 400000,
1990      "default_max_tokens": 64000,
1991      "can_reason": true,
1992      "reasoning_levels": [
1993        "low",
1994        "medium",
1995        "high"
1996      ],
1997      "default_reasoning_effort": "medium",
1998      "supports_attachments": true,
1999      "options": {}
2000    },
2001    {
2002      "id": "openai/gpt-5.1-codex-mini",
2003      "name": "OpenAI: GPT-5.1-Codex-Mini",
2004      "cost_per_1m_in": 0.25,
2005      "cost_per_1m_out": 2,
2006      "cost_per_1m_in_cached": 0,
2007      "cost_per_1m_out_cached": 0.025,
2008      "context_window": 400000,
2009      "default_max_tokens": 50000,
2010      "can_reason": true,
2011      "reasoning_levels": [
2012        "low",
2013        "medium",
2014        "high"
2015      ],
2016      "default_reasoning_effort": "medium",
2017      "supports_attachments": true,
2018      "options": {}
2019    },
2020    {
2021      "id": "openai/gpt-oss-120b",
2022      "name": "OpenAI: gpt-oss-120b",
2023      "cost_per_1m_in": 0.04,
2024      "cost_per_1m_out": 0.2,
2025      "cost_per_1m_in_cached": 0,
2026      "cost_per_1m_out_cached": 0,
2027      "context_window": 131072,
2028      "default_max_tokens": 16384,
2029      "can_reason": true,
2030      "reasoning_levels": [
2031        "low",
2032        "medium",
2033        "high"
2034      ],
2035      "default_reasoning_effort": "medium",
2036      "supports_attachments": false,
2037      "options": {}
2038    },
2039    {
2040      "id": "openai/gpt-oss-120b:exacto",
2041      "name": "OpenAI: gpt-oss-120b (exacto)",
2042      "cost_per_1m_in": 0.04,
2043      "cost_per_1m_out": 0.2,
2044      "cost_per_1m_in_cached": 0,
2045      "cost_per_1m_out_cached": 0,
2046      "context_window": 131072,
2047      "default_max_tokens": 16384,
2048      "can_reason": true,
2049      "reasoning_levels": [
2050        "low",
2051        "medium",
2052        "high"
2053      ],
2054      "default_reasoning_effort": "medium",
2055      "supports_attachments": false,
2056      "options": {}
2057    },
2058    {
2059      "id": "openai/gpt-oss-20b",
2060      "name": "OpenAI: gpt-oss-20b",
2061      "cost_per_1m_in": 0.03,
2062      "cost_per_1m_out": 0.14,
2063      "cost_per_1m_in_cached": 0,
2064      "cost_per_1m_out_cached": 0,
2065      "context_window": 131072,
2066      "default_max_tokens": 13107,
2067      "can_reason": true,
2068      "reasoning_levels": [
2069        "low",
2070        "medium",
2071        "high"
2072      ],
2073      "default_reasoning_effort": "medium",
2074      "supports_attachments": false,
2075      "options": {}
2076    },
2077    {
2078      "id": "openai/gpt-oss-20b:free",
2079      "name": "OpenAI: gpt-oss-20b (free)",
2080      "cost_per_1m_in": 0,
2081      "cost_per_1m_out": 0,
2082      "cost_per_1m_in_cached": 0,
2083      "cost_per_1m_out_cached": 0,
2084      "context_window": 131072,
2085      "default_max_tokens": 65536,
2086      "can_reason": true,
2087      "reasoning_levels": [
2088        "low",
2089        "medium",
2090        "high"
2091      ],
2092      "default_reasoning_effort": "medium",
2093      "supports_attachments": false,
2094      "options": {}
2095    },
2096    {
2097      "id": "openai/gpt-oss-safeguard-20b",
2098      "name": "OpenAI: gpt-oss-safeguard-20b",
2099      "cost_per_1m_in": 0.075,
2100      "cost_per_1m_out": 0.3,
2101      "cost_per_1m_in_cached": 0,
2102      "cost_per_1m_out_cached": 0.037,
2103      "context_window": 131072,
2104      "default_max_tokens": 32768,
2105      "can_reason": true,
2106      "reasoning_levels": [
2107        "low",
2108        "medium",
2109        "high"
2110      ],
2111      "default_reasoning_effort": "medium",
2112      "supports_attachments": false,
2113      "options": {}
2114    },
2115    {
2116      "id": "openai/o1",
2117      "name": "OpenAI: o1",
2118      "cost_per_1m_in": 15,
2119      "cost_per_1m_out": 60,
2120      "cost_per_1m_in_cached": 0,
2121      "cost_per_1m_out_cached": 7.5,
2122      "context_window": 200000,
2123      "default_max_tokens": 50000,
2124      "can_reason": false,
2125      "supports_attachments": true,
2126      "options": {}
2127    },
2128    {
2129      "id": "openai/o3",
2130      "name": "OpenAI: o3",
2131      "cost_per_1m_in": 2,
2132      "cost_per_1m_out": 8,
2133      "cost_per_1m_in_cached": 0,
2134      "cost_per_1m_out_cached": 0.5,
2135      "context_window": 200000,
2136      "default_max_tokens": 50000,
2137      "can_reason": true,
2138      "reasoning_levels": [
2139        "low",
2140        "medium",
2141        "high"
2142      ],
2143      "default_reasoning_effort": "medium",
2144      "supports_attachments": true,
2145      "options": {}
2146    },
2147    {
2148      "id": "openai/o3-deep-research",
2149      "name": "OpenAI: o3 Deep Research",
2150      "cost_per_1m_in": 10,
2151      "cost_per_1m_out": 40,
2152      "cost_per_1m_in_cached": 0,
2153      "cost_per_1m_out_cached": 2.5,
2154      "context_window": 200000,
2155      "default_max_tokens": 50000,
2156      "can_reason": true,
2157      "reasoning_levels": [
2158        "low",
2159        "medium",
2160        "high"
2161      ],
2162      "default_reasoning_effort": "medium",
2163      "supports_attachments": true,
2164      "options": {}
2165    },
2166    {
2167      "id": "openai/o3-mini",
2168      "name": "OpenAI: o3 Mini",
2169      "cost_per_1m_in": 1.1,
2170      "cost_per_1m_out": 4.4,
2171      "cost_per_1m_in_cached": 0,
2172      "cost_per_1m_out_cached": 0.55,
2173      "context_window": 200000,
2174      "default_max_tokens": 50000,
2175      "can_reason": false,
2176      "supports_attachments": false,
2177      "options": {}
2178    },
2179    {
2180      "id": "openai/o3-mini-high",
2181      "name": "OpenAI: o3 Mini High",
2182      "cost_per_1m_in": 1.1,
2183      "cost_per_1m_out": 4.4,
2184      "cost_per_1m_in_cached": 0,
2185      "cost_per_1m_out_cached": 0.55,
2186      "context_window": 200000,
2187      "default_max_tokens": 50000,
2188      "can_reason": false,
2189      "supports_attachments": false,
2190      "options": {}
2191    },
2192    {
2193      "id": "openai/o3-pro",
2194      "name": "OpenAI: o3 Pro",
2195      "cost_per_1m_in": 20,
2196      "cost_per_1m_out": 80,
2197      "cost_per_1m_in_cached": 0,
2198      "cost_per_1m_out_cached": 0,
2199      "context_window": 200000,
2200      "default_max_tokens": 50000,
2201      "can_reason": true,
2202      "reasoning_levels": [
2203        "low",
2204        "medium",
2205        "high"
2206      ],
2207      "default_reasoning_effort": "medium",
2208      "supports_attachments": true,
2209      "options": {}
2210    },
2211    {
2212      "id": "openai/o4-mini",
2213      "name": "OpenAI: o4 Mini",
2214      "cost_per_1m_in": 1.1,
2215      "cost_per_1m_out": 4.4,
2216      "cost_per_1m_in_cached": 0,
2217      "cost_per_1m_out_cached": 0.275,
2218      "context_window": 200000,
2219      "default_max_tokens": 50000,
2220      "can_reason": true,
2221      "reasoning_levels": [
2222        "low",
2223        "medium",
2224        "high"
2225      ],
2226      "default_reasoning_effort": "medium",
2227      "supports_attachments": true,
2228      "options": {}
2229    },
2230    {
2231      "id": "openai/o4-mini-deep-research",
2232      "name": "OpenAI: o4 Mini Deep Research",
2233      "cost_per_1m_in": 2,
2234      "cost_per_1m_out": 8,
2235      "cost_per_1m_in_cached": 0,
2236      "cost_per_1m_out_cached": 0.5,
2237      "context_window": 200000,
2238      "default_max_tokens": 50000,
2239      "can_reason": true,
2240      "reasoning_levels": [
2241        "low",
2242        "medium",
2243        "high"
2244      ],
2245      "default_reasoning_effort": "medium",
2246      "supports_attachments": true,
2247      "options": {}
2248    },
2249    {
2250      "id": "openai/o4-mini-high",
2251      "name": "OpenAI: o4 Mini High",
2252      "cost_per_1m_in": 1.1,
2253      "cost_per_1m_out": 4.4,
2254      "cost_per_1m_in_cached": 0,
2255      "cost_per_1m_out_cached": 0.275,
2256      "context_window": 200000,
2257      "default_max_tokens": 50000,
2258      "can_reason": true,
2259      "reasoning_levels": [
2260        "low",
2261        "medium",
2262        "high"
2263      ],
2264      "default_reasoning_effort": "medium",
2265      "supports_attachments": true,
2266      "options": {}
2267    },
2268    {
2269      "id": "qwen/qwen-2.5-72b-instruct",
2270      "name": "Qwen2.5 72B Instruct",
2271      "cost_per_1m_in": 0.07,
2272      "cost_per_1m_out": 0.26,
2273      "cost_per_1m_in_cached": 0,
2274      "cost_per_1m_out_cached": 0,
2275      "context_window": 32768,
2276      "default_max_tokens": 16384,
2277      "can_reason": false,
2278      "supports_attachments": false,
2279      "options": {}
2280    },
2281    {
2282      "id": "qwen/qwen-plus-2025-07-28",
2283      "name": "Qwen: Qwen Plus 0728",
2284      "cost_per_1m_in": 0.4,
2285      "cost_per_1m_out": 1.2,
2286      "cost_per_1m_in_cached": 0,
2287      "cost_per_1m_out_cached": 0,
2288      "context_window": 1000000,
2289      "default_max_tokens": 16384,
2290      "can_reason": false,
2291      "supports_attachments": false,
2292      "options": {}
2293    },
2294    {
2295      "id": "qwen/qwen-plus-2025-07-28:thinking",
2296      "name": "Qwen: Qwen Plus 0728 (thinking)",
2297      "cost_per_1m_in": 0.4,
2298      "cost_per_1m_out": 4,
2299      "cost_per_1m_in_cached": 0,
2300      "cost_per_1m_out_cached": 0,
2301      "context_window": 1000000,
2302      "default_max_tokens": 16384,
2303      "can_reason": true,
2304      "reasoning_levels": [
2305        "low",
2306        "medium",
2307        "high"
2308      ],
2309      "default_reasoning_effort": "medium",
2310      "supports_attachments": false,
2311      "options": {}
2312    },
2313    {
2314      "id": "qwen/qwen-vl-max",
2315      "name": "Qwen: Qwen VL Max",
2316      "cost_per_1m_in": 0.8,
2317      "cost_per_1m_out": 3.2,
2318      "cost_per_1m_in_cached": 0,
2319      "cost_per_1m_out_cached": 0,
2320      "context_window": 131072,
2321      "default_max_tokens": 4096,
2322      "can_reason": false,
2323      "supports_attachments": true,
2324      "options": {}
2325    },
2326    {
2327      "id": "qwen/qwen-max",
2328      "name": "Qwen: Qwen-Max",
2329      "cost_per_1m_in": 1.6,
2330      "cost_per_1m_out": 6.4,
2331      "cost_per_1m_in_cached": 0,
2332      "cost_per_1m_out_cached": 0.64,
2333      "context_window": 32768,
2334      "default_max_tokens": 4096,
2335      "can_reason": false,
2336      "supports_attachments": false,
2337      "options": {}
2338    },
2339    {
2340      "id": "qwen/qwen-plus",
2341      "name": "Qwen: Qwen-Plus",
2342      "cost_per_1m_in": 0.4,
2343      "cost_per_1m_out": 1.2,
2344      "cost_per_1m_in_cached": 0,
2345      "cost_per_1m_out_cached": 0.16,
2346      "context_window": 131072,
2347      "default_max_tokens": 4096,
2348      "can_reason": false,
2349      "supports_attachments": false,
2350      "options": {}
2351    },
2352    {
2353      "id": "qwen/qwen-turbo",
2354      "name": "Qwen: Qwen-Turbo",
2355      "cost_per_1m_in": 0.05,
2356      "cost_per_1m_out": 0.2,
2357      "cost_per_1m_in_cached": 0,
2358      "cost_per_1m_out_cached": 0.02,
2359      "context_window": 1000000,
2360      "default_max_tokens": 4096,
2361      "can_reason": false,
2362      "supports_attachments": false,
2363      "options": {}
2364    },
2365    {
2366      "id": "qwen/qwen3-14b",
2367      "name": "Qwen: Qwen3 14B",
2368      "cost_per_1m_in": 0.05,
2369      "cost_per_1m_out": 0.22,
2370      "cost_per_1m_in_cached": 0,
2371      "cost_per_1m_out_cached": 0,
2372      "context_window": 40960,
2373      "default_max_tokens": 20480,
2374      "can_reason": true,
2375      "reasoning_levels": [
2376        "low",
2377        "medium",
2378        "high"
2379      ],
2380      "default_reasoning_effort": "medium",
2381      "supports_attachments": false,
2382      "options": {}
2383    },
2384    {
2385      "id": "qwen/qwen3-235b-a22b",
2386      "name": "Qwen: Qwen3 235B A22B",
2387      "cost_per_1m_in": 0.22,
2388      "cost_per_1m_out": 0.88,
2389      "cost_per_1m_in_cached": 0,
2390      "cost_per_1m_out_cached": 0,
2391      "context_window": 131072,
2392      "default_max_tokens": 13107,
2393      "can_reason": true,
2394      "reasoning_levels": [
2395        "low",
2396        "medium",
2397        "high"
2398      ],
2399      "default_reasoning_effort": "medium",
2400      "supports_attachments": false,
2401      "options": {}
2402    },
2403    {
2404      "id": "qwen/qwen3-235b-a22b:free",
2405      "name": "Qwen: Qwen3 235B A22B (free)",
2406      "cost_per_1m_in": 0,
2407      "cost_per_1m_out": 0,
2408      "cost_per_1m_in_cached": 0,
2409      "cost_per_1m_out_cached": 0,
2410      "context_window": 131072,
2411      "default_max_tokens": 13107,
2412      "can_reason": true,
2413      "reasoning_levels": [
2414        "low",
2415        "medium",
2416        "high"
2417      ],
2418      "default_reasoning_effort": "medium",
2419      "supports_attachments": false,
2420      "options": {}
2421    },
2422    {
2423      "id": "qwen/qwen3-235b-a22b-2507",
2424      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2425      "cost_per_1m_in": 0.08,
2426      "cost_per_1m_out": 0.55,
2427      "cost_per_1m_in_cached": 0,
2428      "cost_per_1m_out_cached": 0,
2429      "context_window": 262144,
2430      "default_max_tokens": 131072,
2431      "can_reason": false,
2432      "supports_attachments": false,
2433      "options": {}
2434    },
2435    {
2436      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2437      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2438      "cost_per_1m_in": 0.11,
2439      "cost_per_1m_out": 0.6,
2440      "cost_per_1m_in_cached": 0,
2441      "cost_per_1m_out_cached": 0,
2442      "context_window": 262144,
2443      "default_max_tokens": 131072,
2444      "can_reason": true,
2445      "reasoning_levels": [
2446        "low",
2447        "medium",
2448        "high"
2449      ],
2450      "default_reasoning_effort": "medium",
2451      "supports_attachments": false,
2452      "options": {}
2453    },
2454    {
2455      "id": "qwen/qwen3-30b-a3b",
2456      "name": "Qwen: Qwen3 30B A3B",
2457      "cost_per_1m_in": 0.15,
2458      "cost_per_1m_out": 0.6,
2459      "cost_per_1m_in_cached": 0,
2460      "cost_per_1m_out_cached": 0,
2461      "context_window": 131072,
2462      "default_max_tokens": 4000,
2463      "can_reason": true,
2464      "reasoning_levels": [
2465        "low",
2466        "medium",
2467        "high"
2468      ],
2469      "default_reasoning_effort": "medium",
2470      "supports_attachments": false,
2471      "options": {}
2472    },
2473    {
2474      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2475      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2476      "cost_per_1m_in": 0.1,
2477      "cost_per_1m_out": 0.3,
2478      "cost_per_1m_in_cached": 0,
2479      "cost_per_1m_out_cached": 0,
2480      "context_window": 262144,
2481      "default_max_tokens": 26214,
2482      "can_reason": false,
2483      "supports_attachments": false,
2484      "options": {}
2485    },
2486    {
2487      "id": "qwen/qwen3-32b",
2488      "name": "Qwen: Qwen3 32B",
2489      "cost_per_1m_in": 0.15,
2490      "cost_per_1m_out": 0.5,
2491      "cost_per_1m_in_cached": 0,
2492      "cost_per_1m_out_cached": 0,
2493      "context_window": 131072,
2494      "default_max_tokens": 4000,
2495      "can_reason": true,
2496      "reasoning_levels": [
2497        "low",
2498        "medium",
2499        "high"
2500      ],
2501      "default_reasoning_effort": "medium",
2502      "supports_attachments": false,
2503      "options": {}
2504    },
2505    {
2506      "id": "qwen/qwen3-4b:free",
2507      "name": "Qwen: Qwen3 4B (free)",
2508      "cost_per_1m_in": 0,
2509      "cost_per_1m_out": 0,
2510      "cost_per_1m_in_cached": 0,
2511      "cost_per_1m_out_cached": 0,
2512      "context_window": 40960,
2513      "default_max_tokens": 4096,
2514      "can_reason": true,
2515      "reasoning_levels": [
2516        "low",
2517        "medium",
2518        "high"
2519      ],
2520      "default_reasoning_effort": "medium",
2521      "supports_attachments": false,
2522      "options": {}
2523    },
2524    {
2525      "id": "qwen/qwen3-8b",
2526      "name": "Qwen: Qwen3 8B",
2527      "cost_per_1m_in": 0.2,
2528      "cost_per_1m_out": 0.2,
2529      "cost_per_1m_in_cached": 0,
2530      "cost_per_1m_out_cached": 0,
2531      "context_window": 40960,
2532      "default_max_tokens": 4096,
2533      "can_reason": true,
2534      "reasoning_levels": [
2535        "low",
2536        "medium",
2537        "high"
2538      ],
2539      "default_reasoning_effort": "medium",
2540      "supports_attachments": false,
2541      "options": {}
2542    },
2543    {
2544      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2545      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2546      "cost_per_1m_in": 0.06,
2547      "cost_per_1m_out": 0.25,
2548      "cost_per_1m_in_cached": 0,
2549      "cost_per_1m_out_cached": 0,
2550      "context_window": 262144,
2551      "default_max_tokens": 131072,
2552      "can_reason": false,
2553      "supports_attachments": false,
2554      "options": {}
2555    },
2556    {
2557      "id": "qwen/qwen3-coder",
2558      "name": "Qwen: Qwen3 Coder 480B A35B",
2559      "cost_per_1m_in": 0.232,
2560      "cost_per_1m_out": 0.96,
2561      "cost_per_1m_in_cached": 0,
2562      "cost_per_1m_out_cached": 0,
2563      "context_window": 262144,
2564      "default_max_tokens": 32768,
2565      "can_reason": false,
2566      "supports_attachments": false,
2567      "options": {}
2568    },
2569    {
2570      "id": "qwen/qwen3-coder:exacto",
2571      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2572      "cost_per_1m_in": 1,
2573      "cost_per_1m_out": 4,
2574      "cost_per_1m_in_cached": 0,
2575      "cost_per_1m_out_cached": 0,
2576      "context_window": 262144,
2577      "default_max_tokens": 16384,
2578      "can_reason": false,
2579      "supports_attachments": false,
2580      "options": {}
2581    },
2582    {
2583      "id": "qwen/qwen3-coder:free",
2584      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2585      "cost_per_1m_in": 0,
2586      "cost_per_1m_out": 0,
2587      "cost_per_1m_in_cached": 0,
2588      "cost_per_1m_out_cached": 0,
2589      "context_window": 262000,
2590      "default_max_tokens": 131000,
2591      "can_reason": false,
2592      "supports_attachments": false,
2593      "options": {}
2594    },
2595    {
2596      "id": "qwen/qwen3-coder-flash",
2597      "name": "Qwen: Qwen3 Coder Flash",
2598      "cost_per_1m_in": 0.3,
2599      "cost_per_1m_out": 1.5,
2600      "cost_per_1m_in_cached": 0,
2601      "cost_per_1m_out_cached": 0.08,
2602      "context_window": 128000,
2603      "default_max_tokens": 32768,
2604      "can_reason": false,
2605      "supports_attachments": false,
2606      "options": {}
2607    },
2608    {
2609      "id": "qwen/qwen3-coder-plus",
2610      "name": "Qwen: Qwen3 Coder Plus",
2611      "cost_per_1m_in": 1,
2612      "cost_per_1m_out": 5,
2613      "cost_per_1m_in_cached": 0,
2614      "cost_per_1m_out_cached": 0.1,
2615      "context_window": 128000,
2616      "default_max_tokens": 32768,
2617      "can_reason": false,
2618      "supports_attachments": false,
2619      "options": {}
2620    },
2621    {
2622      "id": "qwen/qwen3-max",
2623      "name": "Qwen: Qwen3 Max",
2624      "cost_per_1m_in": 1.2,
2625      "cost_per_1m_out": 6,
2626      "cost_per_1m_in_cached": 0,
2627      "cost_per_1m_out_cached": 0.24,
2628      "context_window": 256000,
2629      "default_max_tokens": 16384,
2630      "can_reason": false,
2631      "supports_attachments": false,
2632      "options": {}
2633    },
2634    {
2635      "id": "qwen/qwen3-next-80b-a3b-instruct",
2636      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2637      "cost_per_1m_in": 0.15,
2638      "cost_per_1m_out": 1.5,
2639      "cost_per_1m_in_cached": 0,
2640      "cost_per_1m_out_cached": 0,
2641      "context_window": 262144,
2642      "default_max_tokens": 131072,
2643      "can_reason": false,
2644      "supports_attachments": false,
2645      "options": {}
2646    },
2647    {
2648      "id": "qwen/qwen3-next-80b-a3b-thinking",
2649      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2650      "cost_per_1m_in": 0.15,
2651      "cost_per_1m_out": 1.2,
2652      "cost_per_1m_in_cached": 0,
2653      "cost_per_1m_out_cached": 0,
2654      "context_window": 262144,
2655      "default_max_tokens": 131072,
2656      "can_reason": true,
2657      "reasoning_levels": [
2658        "low",
2659        "medium",
2660        "high"
2661      ],
2662      "default_reasoning_effort": "medium",
2663      "supports_attachments": false,
2664      "options": {}
2665    },
2666    {
2667      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2668      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2669      "cost_per_1m_in": 0.4,
2670      "cost_per_1m_out": 1.6,
2671      "cost_per_1m_in_cached": 0,
2672      "cost_per_1m_out_cached": 0,
2673      "context_window": 131072,
2674      "default_max_tokens": 16384,
2675      "can_reason": false,
2676      "supports_attachments": true,
2677      "options": {}
2678    },
2679    {
2680      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2681      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2682      "cost_per_1m_in": 0.3,
2683      "cost_per_1m_out": 1.2,
2684      "cost_per_1m_in_cached": 0,
2685      "cost_per_1m_out_cached": 0,
2686      "context_window": 262144,
2687      "default_max_tokens": 131072,
2688      "can_reason": true,
2689      "reasoning_levels": [
2690        "low",
2691        "medium",
2692        "high"
2693      ],
2694      "default_reasoning_effort": "medium",
2695      "supports_attachments": true,
2696      "options": {}
2697    },
2698    {
2699      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2700      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2701      "cost_per_1m_in": 0.29,
2702      "cost_per_1m_out": 1,
2703      "cost_per_1m_in_cached": 0,
2704      "cost_per_1m_out_cached": 0,
2705      "context_window": 262144,
2706      "default_max_tokens": 131072,
2707      "can_reason": false,
2708      "supports_attachments": true,
2709      "options": {}
2710    },
2711    {
2712      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2713      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2714      "cost_per_1m_in": 0.16,
2715      "cost_per_1m_out": 0.8,
2716      "cost_per_1m_in_cached": 0,
2717      "cost_per_1m_out_cached": 0,
2718      "context_window": 131072,
2719      "default_max_tokens": 16384,
2720      "can_reason": true,
2721      "reasoning_levels": [
2722        "low",
2723        "medium",
2724        "high"
2725      ],
2726      "default_reasoning_effort": "medium",
2727      "supports_attachments": true,
2728      "options": {}
2729    },
2730    {
2731      "id": "qwen/qwen3-vl-8b-instruct",
2732      "name": "Qwen: Qwen3 VL 8B Instruct",
2733      "cost_per_1m_in": 0.18,
2734      "cost_per_1m_out": 0.7,
2735      "cost_per_1m_in_cached": 0,
2736      "cost_per_1m_out_cached": 0,
2737      "context_window": 256000,
2738      "default_max_tokens": 16384,
2739      "can_reason": false,
2740      "supports_attachments": true,
2741      "options": {}
2742    },
2743    {
2744      "id": "qwen/qwen3-vl-8b-thinking",
2745      "name": "Qwen: Qwen3 VL 8B Thinking",
2746      "cost_per_1m_in": 0.18,
2747      "cost_per_1m_out": 2.1,
2748      "cost_per_1m_in_cached": 0,
2749      "cost_per_1m_out_cached": 0,
2750      "context_window": 256000,
2751      "default_max_tokens": 16384,
2752      "can_reason": true,
2753      "reasoning_levels": [
2754        "low",
2755        "medium",
2756        "high"
2757      ],
2758      "default_reasoning_effort": "medium",
2759      "supports_attachments": true,
2760      "options": {}
2761    },
2762    {
2763      "id": "stepfun-ai/step3",
2764      "name": "StepFun: Step3",
2765      "cost_per_1m_in": 0.57,
2766      "cost_per_1m_out": 1.42,
2767      "cost_per_1m_in_cached": 0,
2768      "cost_per_1m_out_cached": 0,
2769      "context_window": 65536,
2770      "default_max_tokens": 32768,
2771      "can_reason": true,
2772      "reasoning_levels": [
2773        "low",
2774        "medium",
2775        "high"
2776      ],
2777      "default_reasoning_effort": "medium",
2778      "supports_attachments": true,
2779      "options": {}
2780    },
2781    {
2782      "id": "tngtech/deepseek-r1t2-chimera",
2783      "name": "TNG: DeepSeek R1T2 Chimera",
2784      "cost_per_1m_in": 0.3,
2785      "cost_per_1m_out": 1.2,
2786      "cost_per_1m_in_cached": 0,
2787      "cost_per_1m_out_cached": 0,
2788      "context_window": 163840,
2789      "default_max_tokens": 81920,
2790      "can_reason": true,
2791      "reasoning_levels": [
2792        "low",
2793        "medium",
2794        "high"
2795      ],
2796      "default_reasoning_effort": "medium",
2797      "supports_attachments": false,
2798      "options": {}
2799    },
2800    {
2801      "id": "thedrummer/rocinante-12b",
2802      "name": "TheDrummer: Rocinante 12B",
2803      "cost_per_1m_in": 0.17,
2804      "cost_per_1m_out": 0.43,
2805      "cost_per_1m_in_cached": 0,
2806      "cost_per_1m_out_cached": 0,
2807      "context_window": 32768,
2808      "default_max_tokens": 3276,
2809      "can_reason": false,
2810      "supports_attachments": false,
2811      "options": {}
2812    },
2813    {
2814      "id": "thedrummer/unslopnemo-12b",
2815      "name": "TheDrummer: UnslopNemo 12B",
2816      "cost_per_1m_in": 0.4,
2817      "cost_per_1m_out": 0.4,
2818      "cost_per_1m_in_cached": 0,
2819      "cost_per_1m_out_cached": 0,
2820      "context_window": 32768,
2821      "default_max_tokens": 3276,
2822      "can_reason": false,
2823      "supports_attachments": false,
2824      "options": {}
2825    },
2826    {
2827      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2828      "name": "Tongyi DeepResearch 30B A3B",
2829      "cost_per_1m_in": 0.09,
2830      "cost_per_1m_out": 0.4,
2831      "cost_per_1m_in_cached": 0,
2832      "cost_per_1m_out_cached": 0,
2833      "context_window": 131072,
2834      "default_max_tokens": 65536,
2835      "can_reason": true,
2836      "reasoning_levels": [
2837        "low",
2838        "medium",
2839        "high"
2840      ],
2841      "default_reasoning_effort": "medium",
2842      "supports_attachments": false,
2843      "options": {}
2844    },
2845    {
2846      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2847      "name": "Tongyi DeepResearch 30B A3B (free)",
2848      "cost_per_1m_in": 0,
2849      "cost_per_1m_out": 0,
2850      "cost_per_1m_in_cached": 0,
2851      "cost_per_1m_out_cached": 0,
2852      "context_window": 131072,
2853      "default_max_tokens": 65536,
2854      "can_reason": true,
2855      "reasoning_levels": [
2856        "low",
2857        "medium",
2858        "high"
2859      ],
2860      "default_reasoning_effort": "medium",
2861      "supports_attachments": false,
2862      "options": {}
2863    },
2864    {
2865      "id": "z-ai/glm-4-32b",
2866      "name": "Z.AI: GLM 4 32B",
2867      "cost_per_1m_in": 0.1,
2868      "cost_per_1m_out": 0.1,
2869      "cost_per_1m_in_cached": 0,
2870      "cost_per_1m_out_cached": 0,
2871      "context_window": 128000,
2872      "default_max_tokens": 12800,
2873      "can_reason": false,
2874      "supports_attachments": false,
2875      "options": {}
2876    },
2877    {
2878      "id": "z-ai/glm-4.5",
2879      "name": "Z.AI: GLM 4.5",
2880      "cost_per_1m_in": 0.35,
2881      "cost_per_1m_out": 1.55,
2882      "cost_per_1m_in_cached": 0,
2883      "cost_per_1m_out_cached": 0,
2884      "context_window": 131072,
2885      "default_max_tokens": 65536,
2886      "can_reason": true,
2887      "reasoning_levels": [
2888        "low",
2889        "medium",
2890        "high"
2891      ],
2892      "default_reasoning_effort": "medium",
2893      "supports_attachments": false,
2894      "options": {}
2895    },
2896    {
2897      "id": "z-ai/glm-4.5-air",
2898      "name": "Z.AI: GLM 4.5 Air",
2899      "cost_per_1m_in": 0.14,
2900      "cost_per_1m_out": 0.86,
2901      "cost_per_1m_in_cached": 0,
2902      "cost_per_1m_out_cached": 0,
2903      "context_window": 131072,
2904      "default_max_tokens": 65536,
2905      "can_reason": true,
2906      "reasoning_levels": [
2907        "low",
2908        "medium",
2909        "high"
2910      ],
2911      "default_reasoning_effort": "medium",
2912      "supports_attachments": false,
2913      "options": {}
2914    },
2915    {
2916      "id": "z-ai/glm-4.5-air:free",
2917      "name": "Z.AI: GLM 4.5 Air (free)",
2918      "cost_per_1m_in": 0,
2919      "cost_per_1m_out": 0,
2920      "cost_per_1m_in_cached": 0,
2921      "cost_per_1m_out_cached": 0,
2922      "context_window": 131072,
2923      "default_max_tokens": 65536,
2924      "can_reason": true,
2925      "reasoning_levels": [
2926        "low",
2927        "medium",
2928        "high"
2929      ],
2930      "default_reasoning_effort": "medium",
2931      "supports_attachments": false,
2932      "options": {}
2933    },
2934    {
2935      "id": "z-ai/glm-4.5v",
2936      "name": "Z.AI: GLM 4.5V",
2937      "cost_per_1m_in": 0.48,
2938      "cost_per_1m_out": 1.44,
2939      "cost_per_1m_in_cached": 0,
2940      "cost_per_1m_out_cached": 0.11,
2941      "context_window": 65536,
2942      "default_max_tokens": 8192,
2943      "can_reason": true,
2944      "reasoning_levels": [
2945        "low",
2946        "medium",
2947        "high"
2948      ],
2949      "default_reasoning_effort": "medium",
2950      "supports_attachments": true,
2951      "options": {}
2952    },
2953    {
2954      "id": "z-ai/glm-4.6",
2955      "name": "Z.AI: GLM 4.6",
2956      "cost_per_1m_in": 0.48,
2957      "cost_per_1m_out": 1.76,
2958      "cost_per_1m_in_cached": 0,
2959      "cost_per_1m_out_cached": 0.11,
2960      "context_window": 204800,
2961      "default_max_tokens": 65536,
2962      "can_reason": true,
2963      "reasoning_levels": [
2964        "low",
2965        "medium",
2966        "high"
2967      ],
2968      "default_reasoning_effort": "medium",
2969      "supports_attachments": false,
2970      "options": {}
2971    },
2972    {
2973      "id": "z-ai/glm-4.6:exacto",
2974      "name": "Z.AI: GLM 4.6 (exacto)",
2975      "cost_per_1m_in": 0.48,
2976      "cost_per_1m_out": 1.76,
2977      "cost_per_1m_in_cached": 0,
2978      "cost_per_1m_out_cached": 0,
2979      "context_window": 204800,
2980      "default_max_tokens": 65536,
2981      "can_reason": true,
2982      "reasoning_levels": [
2983        "low",
2984        "medium",
2985        "high"
2986      ],
2987      "default_reasoning_effort": "medium",
2988      "supports_attachments": false,
2989      "options": {}
2990    },
2991    {
2992      "id": "x-ai/grok-3",
2993      "name": "xAI: Grok 3",
2994      "cost_per_1m_in": 5,
2995      "cost_per_1m_out": 25,
2996      "cost_per_1m_in_cached": 0,
2997      "cost_per_1m_out_cached": 1.25,
2998      "context_window": 131072,
2999      "default_max_tokens": 13107,
3000      "can_reason": false,
3001      "supports_attachments": false,
3002      "options": {}
3003    },
3004    {
3005      "id": "x-ai/grok-3-beta",
3006      "name": "xAI: Grok 3 Beta",
3007      "cost_per_1m_in": 5,
3008      "cost_per_1m_out": 25,
3009      "cost_per_1m_in_cached": 0,
3010      "cost_per_1m_out_cached": 1.25,
3011      "context_window": 131072,
3012      "default_max_tokens": 13107,
3013      "can_reason": false,
3014      "supports_attachments": false,
3015      "options": {}
3016    },
3017    {
3018      "id": "x-ai/grok-3-mini",
3019      "name": "xAI: Grok 3 Mini",
3020      "cost_per_1m_in": 0.3,
3021      "cost_per_1m_out": 0.5,
3022      "cost_per_1m_in_cached": 0,
3023      "cost_per_1m_out_cached": 0.075,
3024      "context_window": 131072,
3025      "default_max_tokens": 13107,
3026      "can_reason": true,
3027      "reasoning_levels": [
3028        "low",
3029        "medium",
3030        "high"
3031      ],
3032      "default_reasoning_effort": "medium",
3033      "supports_attachments": false,
3034      "options": {}
3035    },
3036    {
3037      "id": "x-ai/grok-3-mini-beta",
3038      "name": "xAI: Grok 3 Mini Beta",
3039      "cost_per_1m_in": 0.3,
3040      "cost_per_1m_out": 0.5,
3041      "cost_per_1m_in_cached": 0,
3042      "cost_per_1m_out_cached": 0.075,
3043      "context_window": 131072,
3044      "default_max_tokens": 13107,
3045      "can_reason": true,
3046      "reasoning_levels": [
3047        "low",
3048        "medium",
3049        "high"
3050      ],
3051      "default_reasoning_effort": "medium",
3052      "supports_attachments": false,
3053      "options": {}
3054    },
3055    {
3056      "id": "x-ai/grok-4",
3057      "name": "xAI: Grok 4",
3058      "cost_per_1m_in": 3,
3059      "cost_per_1m_out": 15,
3060      "cost_per_1m_in_cached": 0,
3061      "cost_per_1m_out_cached": 0.75,
3062      "context_window": 256000,
3063      "default_max_tokens": 25600,
3064      "can_reason": true,
3065      "reasoning_levels": [
3066        "low",
3067        "medium",
3068        "high"
3069      ],
3070      "default_reasoning_effort": "medium",
3071      "supports_attachments": true,
3072      "options": {}
3073    },
3074    {
3075      "id": "x-ai/grok-4-fast",
3076      "name": "xAI: Grok 4 Fast",
3077      "cost_per_1m_in": 0.2,
3078      "cost_per_1m_out": 0.5,
3079      "cost_per_1m_in_cached": 0,
3080      "cost_per_1m_out_cached": 0.05,
3081      "context_window": 2000000,
3082      "default_max_tokens": 15000,
3083      "can_reason": true,
3084      "reasoning_levels": [
3085        "low",
3086        "medium",
3087        "high"
3088      ],
3089      "default_reasoning_effort": "medium",
3090      "supports_attachments": true,
3091      "options": {}
3092    },
3093    {
3094      "id": "x-ai/grok-4.1-fast",
3095      "name": "xAI: Grok 4.1 Fast",
3096      "cost_per_1m_in": 0,
3097      "cost_per_1m_out": 0,
3098      "cost_per_1m_in_cached": 0,
3099      "cost_per_1m_out_cached": 0,
3100      "context_window": 2000000,
3101      "default_max_tokens": 15000,
3102      "can_reason": true,
3103      "reasoning_levels": [
3104        "low",
3105        "medium",
3106        "high"
3107      ],
3108      "default_reasoning_effort": "medium",
3109      "supports_attachments": true,
3110      "options": {}
3111    },
3112    {
3113      "id": "x-ai/grok-4.1-fast:free",
3114      "name": "xAI: Grok 4.1 Fast (free)",
3115      "cost_per_1m_in": 0,
3116      "cost_per_1m_out": 0,
3117      "cost_per_1m_in_cached": 0,
3118      "cost_per_1m_out_cached": 0,
3119      "context_window": 2000000,
3120      "default_max_tokens": 15000,
3121      "can_reason": true,
3122      "reasoning_levels": [
3123        "low",
3124        "medium",
3125        "high"
3126      ],
3127      "default_reasoning_effort": "medium",
3128      "supports_attachments": true,
3129      "options": {}
3130    },
3131    {
3132      "id": "x-ai/grok-code-fast-1",
3133      "name": "xAI: Grok Code Fast 1",
3134      "cost_per_1m_in": 0.2,
3135      "cost_per_1m_out": 1.5,
3136      "cost_per_1m_in_cached": 0,
3137      "cost_per_1m_out_cached": 0.02,
3138      "context_window": 256000,
3139      "default_max_tokens": 5000,
3140      "can_reason": true,
3141      "reasoning_levels": [
3142        "low",
3143        "medium",
3144        "high"
3145      ],
3146      "default_reasoning_effort": "medium",
3147      "supports_attachments": false,
3148      "options": {}
3149    }
3150  ],
3151  "default_headers": {
3152    "HTTP-Referer": "https://charm.land",
3153    "X-Title": "Crush"
3154  }
3155}