   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false,
  21      "options": {}
  22    },
  23    {
  24      "id": "ai21/jamba-mini-1.7",
  25      "name": "AI21: Jamba Mini 1.7",
   26      "cost_per_1m_in": 0.2,
   27      "cost_per_1m_out": 0.4,
  28      "cost_per_1m_in_cached": 0,
  29      "cost_per_1m_out_cached": 0,
  30      "context_window": 256000,
  31      "default_max_tokens": 2048,
  32      "can_reason": false,
  33      "supports_attachments": false,
  34      "options": {}
  35    },
  36    {
  37      "id": "amazon/nova-lite-v1",
  38      "name": "Amazon: Nova Lite 1.0",
  39      "cost_per_1m_in": 0.06,
  40      "cost_per_1m_out": 0.24,
  41      "cost_per_1m_in_cached": 0,
  42      "cost_per_1m_out_cached": 0,
  43      "context_window": 300000,
  44      "default_max_tokens": 2560,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "amazon/nova-micro-v1",
  51      "name": "Amazon: Nova Micro 1.0",
  52      "cost_per_1m_in": 0.035,
  53      "cost_per_1m_out": 0.14,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 128000,
  57      "default_max_tokens": 2560,
  58      "can_reason": false,
  59      "supports_attachments": false,
  60      "options": {}
  61    },
  62    {
  63      "id": "amazon/nova-pro-v1",
  64      "name": "Amazon: Nova Pro 1.0",
   65      "cost_per_1m_in": 0.8,
   66      "cost_per_1m_out": 3.2,
  67      "cost_per_1m_in_cached": 0,
  68      "cost_per_1m_out_cached": 0,
  69      "context_window": 300000,
  70      "default_max_tokens": 2560,
  71      "can_reason": false,
  72      "supports_attachments": true,
  73      "options": {}
  74    },
  75    {
  76      "id": "openrouter/andromeda-alpha",
  77      "name": "Andromeda Alpha",
  78      "cost_per_1m_in": 0,
  79      "cost_per_1m_out": 0,
  80      "cost_per_1m_in_cached": 0,
  81      "cost_per_1m_out_cached": 0,
  82      "context_window": 128000,
  83      "default_max_tokens": 64000,
  84      "can_reason": true,
  85      "reasoning_levels": [
  86        "low",
  87        "medium",
  88        "high"
  89      ],
  90      "default_reasoning_effort": "medium",
  91      "supports_attachments": true,
  92      "options": {}
  93    },
  94    {
  95      "id": "anthropic/claude-3-haiku",
  96      "name": "Anthropic: Claude 3 Haiku",
  97      "cost_per_1m_in": 0.25,
  98      "cost_per_1m_out": 1.25,
  99      "cost_per_1m_in_cached": 0.3,
 100      "cost_per_1m_out_cached": 0.03,
 101      "context_window": 200000,
 102      "default_max_tokens": 2048,
 103      "can_reason": false,
 104      "supports_attachments": true,
 105      "options": {}
 106    },
 107    {
 108      "id": "anthropic/claude-3-opus",
 109      "name": "Anthropic: Claude 3 Opus",
 110      "cost_per_1m_in": 15,
 111      "cost_per_1m_out": 75,
 112      "cost_per_1m_in_cached": 18.75,
 113      "cost_per_1m_out_cached": 1.5,
 114      "context_window": 200000,
 115      "default_max_tokens": 2048,
 116      "can_reason": false,
 117      "supports_attachments": true,
 118      "options": {}
 119    },
 120    {
 121      "id": "anthropic/claude-3.5-haiku",
 122      "name": "Anthropic: Claude 3.5 Haiku",
  123      "cost_per_1m_in": 0.8,
 124      "cost_per_1m_out": 4,
 125      "cost_per_1m_in_cached": 1,
 126      "cost_per_1m_out_cached": 0.08,
 127      "context_window": 200000,
 128      "default_max_tokens": 4096,
 129      "can_reason": false,
 130      "supports_attachments": true,
 131      "options": {}
 132    },
 133    {
 134      "id": "anthropic/claude-3.5-haiku-20241022",
 135      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
  136      "cost_per_1m_in": 0.8,
 137      "cost_per_1m_out": 4,
 138      "cost_per_1m_in_cached": 1,
 139      "cost_per_1m_out_cached": 0.08,
 140      "context_window": 200000,
 141      "default_max_tokens": 4096,
 142      "can_reason": false,
 143      "supports_attachments": true,
 144      "options": {}
 145    },
 146    {
 147      "id": "anthropic/claude-3.5-sonnet",
 148      "name": "Anthropic: Claude 3.5 Sonnet",
 149      "cost_per_1m_in": 3,
 150      "cost_per_1m_out": 15,
 151      "cost_per_1m_in_cached": 3.75,
 152      "cost_per_1m_out_cached": 0.3,
 153      "context_window": 200000,
 154      "default_max_tokens": 4096,
 155      "can_reason": false,
 156      "supports_attachments": true,
 157      "options": {}
 158    },
 159    {
 160      "id": "anthropic/claude-3.5-sonnet-20240620",
 161      "name": "Anthropic: Claude 3.5 Sonnet (2024-06-20)",
 162      "cost_per_1m_in": 3,
 163      "cost_per_1m_out": 15,
 164      "cost_per_1m_in_cached": 3.75,
 165      "cost_per_1m_out_cached": 0.3,
 166      "context_window": 200000,
 167      "default_max_tokens": 4096,
 168      "can_reason": false,
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-3.7-sonnet",
 174      "name": "Anthropic: Claude 3.7 Sonnet",
 175      "cost_per_1m_in": 3,
 176      "cost_per_1m_out": 15,
 177      "cost_per_1m_in_cached": 3.75,
 178      "cost_per_1m_out_cached": 0.3,
 179      "context_window": 200000,
 180      "default_max_tokens": 32000,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "low",
 184        "medium",
 185        "high"
 186      ],
 187      "default_reasoning_effort": "medium",
 188      "supports_attachments": true,
 189      "options": {}
 190    },
 191    {
 192      "id": "anthropic/claude-3.7-sonnet:thinking",
 193      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 194      "cost_per_1m_in": 3,
 195      "cost_per_1m_out": 15,
 196      "cost_per_1m_in_cached": 3.75,
 197      "cost_per_1m_out_cached": 0.3,
 198      "context_window": 200000,
 199      "default_max_tokens": 32000,
 200      "can_reason": true,
 201      "reasoning_levels": [
 202        "low",
 203        "medium",
 204        "high"
 205      ],
 206      "default_reasoning_effort": "medium",
 207      "supports_attachments": true,
 208      "options": {}
 209    },
 210    {
 211      "id": "anthropic/claude-haiku-4.5",
 212      "name": "Anthropic: Claude Haiku 4.5",
 213      "cost_per_1m_in": 1,
 214      "cost_per_1m_out": 5,
 215      "cost_per_1m_in_cached": 1.25,
  216      "cost_per_1m_out_cached": 0.1,
 217      "context_window": 200000,
 218      "default_max_tokens": 32000,
 219      "can_reason": true,
 220      "reasoning_levels": [
 221        "low",
 222        "medium",
 223        "high"
 224      ],
 225      "default_reasoning_effort": "medium",
 226      "supports_attachments": true,
 227      "options": {}
 228    },
 229    {
 230      "id": "anthropic/claude-opus-4",
 231      "name": "Anthropic: Claude Opus 4",
 232      "cost_per_1m_in": 15,
 233      "cost_per_1m_out": 75,
 234      "cost_per_1m_in_cached": 18.75,
 235      "cost_per_1m_out_cached": 1.5,
 236      "context_window": 200000,
 237      "default_max_tokens": 16000,
 238      "can_reason": true,
 239      "reasoning_levels": [
 240        "low",
 241        "medium",
 242        "high"
 243      ],
 244      "default_reasoning_effort": "medium",
 245      "supports_attachments": true,
 246      "options": {}
 247    },
 248    {
 249      "id": "anthropic/claude-opus-4.1",
 250      "name": "Anthropic: Claude Opus 4.1",
 251      "cost_per_1m_in": 15,
 252      "cost_per_1m_out": 75,
 253      "cost_per_1m_in_cached": 18.75,
 254      "cost_per_1m_out_cached": 1.5,
 255      "context_window": 200000,
 256      "default_max_tokens": 16000,
 257      "can_reason": true,
 258      "reasoning_levels": [
 259        "low",
 260        "medium",
 261        "high"
 262      ],
 263      "default_reasoning_effort": "medium",
 264      "supports_attachments": true,
 265      "options": {}
 266    },
 267    {
 268      "id": "anthropic/claude-sonnet-4",
 269      "name": "Anthropic: Claude Sonnet 4",
 270      "cost_per_1m_in": 3,
 271      "cost_per_1m_out": 15,
 272      "cost_per_1m_in_cached": 3.75,
 273      "cost_per_1m_out_cached": 0.3,
 274      "context_window": 1000000,
 275      "default_max_tokens": 32000,
 276      "can_reason": true,
 277      "reasoning_levels": [
 278        "low",
 279        "medium",
 280        "high"
 281      ],
 282      "default_reasoning_effort": "medium",
 283      "supports_attachments": true,
 284      "options": {}
 285    },
 286    {
 287      "id": "anthropic/claude-sonnet-4.5",
 288      "name": "Anthropic: Claude Sonnet 4.5",
 289      "cost_per_1m_in": 3,
 290      "cost_per_1m_out": 15,
 291      "cost_per_1m_in_cached": 0,
 292      "cost_per_1m_out_cached": 0,
 293      "context_window": 1000000,
 294      "default_max_tokens": 32000,
 295      "can_reason": true,
 296      "reasoning_levels": [
 297        "low",
 298        "medium",
 299        "high"
 300      ],
 301      "default_reasoning_effort": "medium",
 302      "supports_attachments": true,
 303      "options": {}
 304    },
 305    {
 306      "id": "arcee-ai/virtuoso-large",
 307      "name": "Arcee AI: Virtuoso Large",
 308      "cost_per_1m_in": 0.75,
 309      "cost_per_1m_out": 1.2,
 310      "cost_per_1m_in_cached": 0,
 311      "cost_per_1m_out_cached": 0,
 312      "context_window": 131072,
 313      "default_max_tokens": 32000,
 314      "can_reason": false,
 315      "supports_attachments": false,
 316      "options": {}
 317    },
 318    {
 319      "id": "baidu/ernie-4.5-21b-a3b",
 320      "name": "Baidu: ERNIE 4.5 21B A3B",
 321      "cost_per_1m_in": 0.07,
 322      "cost_per_1m_out": 0.28,
 323      "cost_per_1m_in_cached": 0,
 324      "cost_per_1m_out_cached": 0,
 325      "context_window": 120000,
 326      "default_max_tokens": 4000,
 327      "can_reason": false,
 328      "supports_attachments": false,
 329      "options": {}
 330    },
 331    {
 332      "id": "baidu/ernie-4.5-vl-28b-a3b",
 333      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 334      "cost_per_1m_in": 0.14,
 335      "cost_per_1m_out": 0.56,
 336      "cost_per_1m_in_cached": 0,
 337      "cost_per_1m_out_cached": 0,
 338      "context_window": 30000,
 339      "default_max_tokens": 4000,
 340      "can_reason": true,
 341      "reasoning_levels": [
 342        "low",
 343        "medium",
 344        "high"
 345      ],
 346      "default_reasoning_effort": "medium",
 347      "supports_attachments": true,
 348      "options": {}
 349    },
 350    {
 351      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
 352      "name": "Cogito V2 Preview Llama 109B",
 353      "cost_per_1m_in": 0.18,
 354      "cost_per_1m_out": 0.59,
 355      "cost_per_1m_in_cached": 0,
 356      "cost_per_1m_out_cached": 0,
 357      "context_window": 32767,
 358      "default_max_tokens": 3276,
 359      "can_reason": true,
 360      "reasoning_levels": [
 361        "low",
 362        "medium",
 363        "high"
 364      ],
 365      "default_reasoning_effort": "medium",
 366      "supports_attachments": true,
 367      "options": {}
 368    },
 369    {
 370      "id": "cohere/command-r-08-2024",
 371      "name": "Cohere: Command R (08-2024)",
 372      "cost_per_1m_in": 0.15,
 373      "cost_per_1m_out": 0.6,
 374      "cost_per_1m_in_cached": 0,
 375      "cost_per_1m_out_cached": 0,
 376      "context_window": 128000,
 377      "default_max_tokens": 2000,
 378      "can_reason": false,
 379      "supports_attachments": false,
 380      "options": {}
 381    },
 382    {
 383      "id": "cohere/command-r-plus-08-2024",
 384      "name": "Cohere: Command R+ (08-2024)",
 385      "cost_per_1m_in": 2.5,
 386      "cost_per_1m_out": 10,
 387      "cost_per_1m_in_cached": 0,
 388      "cost_per_1m_out_cached": 0,
 389      "context_window": 128000,
 390      "default_max_tokens": 2000,
 391      "can_reason": false,
 392      "supports_attachments": false,
 393      "options": {}
 394    },
 395    {
 396      "id": "deepcogito/cogito-v2-preview-llama-405b",
 397      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
 398      "cost_per_1m_in": 3.5,
 399      "cost_per_1m_out": 3.5,
 400      "cost_per_1m_in_cached": 0,
 401      "cost_per_1m_out_cached": 0,
 402      "context_window": 32768,
 403      "default_max_tokens": 3276,
 404      "can_reason": true,
 405      "reasoning_levels": [
 406        "low",
 407        "medium",
 408        "high"
 409      ],
 410      "default_reasoning_effort": "medium",
 411      "supports_attachments": false,
 412      "options": {}
 413    },
 414    {
 415      "id": "deepcogito/cogito-v2-preview-llama-70b",
 416      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
 417      "cost_per_1m_in": 0.88,
 418      "cost_per_1m_out": 0.88,
 419      "cost_per_1m_in_cached": 0,
 420      "cost_per_1m_out_cached": 0,
 421      "context_window": 32768,
 422      "default_max_tokens": 3276,
 423      "can_reason": true,
 424      "reasoning_levels": [
 425        "low",
 426        "medium",
 427        "high"
 428      ],
 429      "default_reasoning_effort": "medium",
 430      "supports_attachments": false,
 431      "options": {}
 432    },
 433    {
 434      "id": "deepseek/deepseek-chat",
 435      "name": "DeepSeek: DeepSeek V3",
  436      "cost_per_1m_in": 0.4,
 437      "cost_per_1m_out": 1.3,
 438      "cost_per_1m_in_cached": 0,
 439      "cost_per_1m_out_cached": 0,
 440      "context_window": 64000,
 441      "default_max_tokens": 8000,
 442      "can_reason": false,
 443      "supports_attachments": false,
 444      "options": {}
 445    },
 446    {
 447      "id": "deepseek/deepseek-chat-v3-0324",
 448      "name": "DeepSeek: DeepSeek V3 0324",
 449      "cost_per_1m_in": 0.5,
 450      "cost_per_1m_out": 1.5,
 451      "cost_per_1m_in_cached": 0,
 452      "cost_per_1m_out_cached": 0,
 453      "context_window": 163840,
 454      "default_max_tokens": 16384,
 455      "can_reason": false,
 456      "supports_attachments": false,
 457      "options": {}
 458    },
 459    {
 460      "id": "deepseek/deepseek-chat-v3-0324:free",
 461      "name": "DeepSeek: DeepSeek V3 0324 (free)",
 462      "cost_per_1m_in": 0,
 463      "cost_per_1m_out": 0,
 464      "cost_per_1m_in_cached": 0,
 465      "cost_per_1m_out_cached": 0,
 466      "context_window": 163840,
 467      "default_max_tokens": 16384,
 468      "can_reason": false,
 469      "supports_attachments": false,
 470      "options": {}
 471    },
 472    {
 473      "id": "deepseek/deepseek-chat-v3.1",
 474      "name": "DeepSeek: DeepSeek V3.1",
 475      "cost_per_1m_in": 0.27,
 476      "cost_per_1m_out": 1,
 477      "cost_per_1m_in_cached": 0,
 478      "cost_per_1m_out_cached": 0,
 479      "context_window": 163840,
 480      "default_max_tokens": 16384,
 481      "can_reason": true,
 482      "reasoning_levels": [
 483        "low",
 484        "medium",
 485        "high"
 486      ],
 487      "default_reasoning_effort": "medium",
 488      "supports_attachments": false,
 489      "options": {}
 490    },
 491    {
 492      "id": "deepseek/deepseek-v3.1-terminus",
 493      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 494      "cost_per_1m_in": 0.27,
 495      "cost_per_1m_out": 1,
 496      "cost_per_1m_in_cached": 0,
 497      "cost_per_1m_out_cached": 0,
 498      "context_window": 163840,
 499      "default_max_tokens": 16384,
 500      "can_reason": true,
 501      "reasoning_levels": [
 502        "low",
 503        "medium",
 504        "high"
 505      ],
 506      "default_reasoning_effort": "medium",
 507      "supports_attachments": false,
 508      "options": {}
 509    },
 510    {
 511      "id": "deepseek/deepseek-v3.1-terminus:exacto",
 512      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
 513      "cost_per_1m_in": 0.27,
 514      "cost_per_1m_out": 1,
 515      "cost_per_1m_in_cached": 0,
 516      "cost_per_1m_out_cached": 0,
 517      "context_window": 163840,
 518      "default_max_tokens": 16384,
 519      "can_reason": true,
 520      "reasoning_levels": [
 521        "low",
 522        "medium",
 523        "high"
 524      ],
 525      "default_reasoning_effort": "medium",
 526      "supports_attachments": false,
 527      "options": {}
 528    },
 529    {
 530      "id": "deepseek/deepseek-v3.2-exp",
 531      "name": "DeepSeek: DeepSeek V3.2 Exp",
 532      "cost_per_1m_in": 0.27,
  533      "cost_per_1m_out": 0.4,
 534      "cost_per_1m_in_cached": 0,
 535      "cost_per_1m_out_cached": 0,
 536      "context_window": 163840,
 537      "default_max_tokens": 16384,
 538      "can_reason": true,
 539      "reasoning_levels": [
 540        "low",
 541        "medium",
 542        "high"
 543      ],
 544      "default_reasoning_effort": "medium",
 545      "supports_attachments": false,
 546      "options": {}
 547    },
 548    {
 549      "id": "deepseek/deepseek-r1",
 550      "name": "DeepSeek: R1",
 551      "cost_per_1m_in": 0.7,
 552      "cost_per_1m_out": 2.4,
 553      "cost_per_1m_in_cached": 0,
 554      "cost_per_1m_out_cached": 0,
 555      "context_window": 163840,
 556      "default_max_tokens": 81920,
 557      "can_reason": true,
 558      "reasoning_levels": [
 559        "low",
 560        "medium",
 561        "high"
 562      ],
 563      "default_reasoning_effort": "medium",
 564      "supports_attachments": false,
 565      "options": {}
 566    },
 567    {
 568      "id": "deepseek/deepseek-r1-0528",
 569      "name": "DeepSeek: R1 0528",
  570      "cost_per_1m_in": 0.4,
 571      "cost_per_1m_out": 1.75,
 572      "cost_per_1m_in_cached": 0,
 573      "cost_per_1m_out_cached": 0,
 574      "context_window": 163840,
 575      "default_max_tokens": 81920,
 576      "can_reason": true,
 577      "reasoning_levels": [
 578        "low",
 579        "medium",
 580        "high"
 581      ],
 582      "default_reasoning_effort": "medium",
 583      "supports_attachments": false,
 584      "options": {}
 585    },
 586    {
 587      "id": "deepseek/deepseek-r1-distill-llama-70b",
 588      "name": "DeepSeek: R1 Distill Llama 70B",
 589      "cost_per_1m_in": 0.03,
 590      "cost_per_1m_out": 0.13,
 591      "cost_per_1m_in_cached": 0,
 592      "cost_per_1m_out_cached": 0,
 593      "context_window": 131072,
 594      "default_max_tokens": 65536,
 595      "can_reason": true,
 596      "reasoning_levels": [
 597        "low",
 598        "medium",
 599        "high"
 600      ],
 601      "default_reasoning_effort": "medium",
 602      "supports_attachments": false,
 603      "options": {}
 604    },
 605    {
 606      "id": "google/gemini-2.0-flash-001",
 607      "name": "Google: Gemini 2.0 Flash",
  608      "cost_per_1m_in": 0.1,
  609      "cost_per_1m_out": 0.4,
  610      "cost_per_1m_in_cached": 0.1833,
  611      "cost_per_1m_out_cached": 0.025,
 612      "context_window": 1048576,
 613      "default_max_tokens": 4096,
 614      "can_reason": false,
 615      "supports_attachments": true,
 616      "options": {}
 617    },
 618    {
 619      "id": "google/gemini-2.0-flash-exp:free",
 620      "name": "Google: Gemini 2.0 Flash Experimental (free)",
 621      "cost_per_1m_in": 0,
 622      "cost_per_1m_out": 0,
 623      "cost_per_1m_in_cached": 0,
 624      "cost_per_1m_out_cached": 0,
 625      "context_window": 1048576,
 626      "default_max_tokens": 4096,
 627      "can_reason": false,
 628      "supports_attachments": true,
 629      "options": {}
 630    },
 631    {
 632      "id": "google/gemini-2.0-flash-lite-001",
 633      "name": "Google: Gemini 2.0 Flash Lite",
 634      "cost_per_1m_in": 0.075,
 635      "cost_per_1m_out": 0.3,
 636      "cost_per_1m_in_cached": 0,
 637      "cost_per_1m_out_cached": 0,
 638      "context_window": 1048576,
 639      "default_max_tokens": 4096,
 640      "can_reason": false,
 641      "supports_attachments": true,
 642      "options": {}
 643    },
 644    {
 645      "id": "google/gemini-2.5-flash",
 646      "name": "Google: Gemini 2.5 Flash",
 647      "cost_per_1m_in": 0.3,
 648      "cost_per_1m_out": 2.5,
 649      "cost_per_1m_in_cached": 0.3833,
 650      "cost_per_1m_out_cached": 0.03,
 651      "context_window": 1048576,
 652      "default_max_tokens": 32767,
 653      "can_reason": true,
 654      "reasoning_levels": [
 655        "low",
 656        "medium",
 657        "high"
 658      ],
 659      "default_reasoning_effort": "medium",
 660      "supports_attachments": true,
 661      "options": {}
 662    },
 663    {
 664      "id": "google/gemini-2.5-flash-lite",
 665      "name": "Google: Gemini 2.5 Flash Lite",
  666      "cost_per_1m_in": 0.1,
  667      "cost_per_1m_out": 0.4,
  668      "cost_per_1m_in_cached": 0.1833,
 669      "cost_per_1m_out_cached": 0.01,
 670      "context_window": 1048576,
 671      "default_max_tokens": 32767,
 672      "can_reason": true,
 673      "reasoning_levels": [
 674        "low",
 675        "medium",
 676        "high"
 677      ],
 678      "default_reasoning_effort": "medium",
 679      "supports_attachments": true,
 680      "options": {}
 681    },
 682    {
 683      "id": "google/gemini-2.5-flash-lite-preview-06-17",
 684      "name": "Google: Gemini 2.5 Flash Lite Preview 06-17",
  685      "cost_per_1m_in": 0.1,
  686      "cost_per_1m_out": 0.4,
  687      "cost_per_1m_in_cached": 0.1833,
  688      "cost_per_1m_out_cached": 0.025,
 689      "context_window": 1048576,
 690      "default_max_tokens": 32767,
 691      "can_reason": true,
 692      "reasoning_levels": [
 693        "low",
 694        "medium",
 695        "high"
 696      ],
 697      "default_reasoning_effort": "medium",
 698      "supports_attachments": true,
 699      "options": {}
 700    },
 701    {
 702      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 703      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
  704      "cost_per_1m_in": 0.1,
  705      "cost_per_1m_out": 0.4,
 706      "cost_per_1m_in_cached": 0,
 707      "cost_per_1m_out_cached": 0,
 708      "context_window": 1048576,
 709      "default_max_tokens": 32768,
 710      "can_reason": true,
 711      "reasoning_levels": [
 712        "low",
 713        "medium",
 714        "high"
 715      ],
 716      "default_reasoning_effort": "medium",
 717      "supports_attachments": true,
 718      "options": {}
 719    },
 720    {
 721      "id": "google/gemini-2.5-flash-preview-09-2025",
 722      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
 723      "cost_per_1m_in": 0.3,
 724      "cost_per_1m_out": 2.5,
 725      "cost_per_1m_in_cached": 0.3833,
 726      "cost_per_1m_out_cached": 0.075,
 727      "context_window": 1048576,
 728      "default_max_tokens": 32767,
 729      "can_reason": true,
 730      "reasoning_levels": [
 731        "low",
 732        "medium",
 733        "high"
 734      ],
 735      "default_reasoning_effort": "medium",
 736      "supports_attachments": true,
 737      "options": {}
 738    },
 739    {
 740      "id": "google/gemini-2.5-pro",
 741      "name": "Google: Gemini 2.5 Pro",
 742      "cost_per_1m_in": 1.25,
 743      "cost_per_1m_out": 10,
 744      "cost_per_1m_in_cached": 1.625,
 745      "cost_per_1m_out_cached": 0.125,
 746      "context_window": 1048576,
 747      "default_max_tokens": 32768,
 748      "can_reason": true,
 749      "reasoning_levels": [
 750        "low",
 751        "medium",
 752        "high"
 753      ],
 754      "default_reasoning_effort": "medium",
 755      "supports_attachments": true,
 756      "options": {}
 757    },
 758    {
 759      "id": "google/gemini-2.5-pro-preview-05-06",
 760      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 761      "cost_per_1m_in": 1.25,
 762      "cost_per_1m_out": 10,
 763      "cost_per_1m_in_cached": 1.625,
 764      "cost_per_1m_out_cached": 0.125,
 765      "context_window": 1048576,
 766      "default_max_tokens": 32768,
 767      "can_reason": true,
 768      "reasoning_levels": [
 769        "low",
 770        "medium",
 771        "high"
 772      ],
 773      "default_reasoning_effort": "medium",
 774      "supports_attachments": true,
 775      "options": {}
 776    },
 777    {
 778      "id": "google/gemini-2.5-pro-preview",
 779      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 780      "cost_per_1m_in": 1.25,
 781      "cost_per_1m_out": 10,
 782      "cost_per_1m_in_cached": 1.625,
 783      "cost_per_1m_out_cached": 0.125,
 784      "context_window": 1048576,
 785      "default_max_tokens": 32768,
 786      "can_reason": true,
 787      "reasoning_levels": [
 788        "low",
 789        "medium",
 790        "high"
 791      ],
 792      "default_reasoning_effort": "medium",
 793      "supports_attachments": true,
 794      "options": {}
 795    },
 796    {
 797      "id": "google/gemma-3-27b-it",
 798      "name": "Google: Gemma 3 27B",
 799      "cost_per_1m_in": 0.13,
 800      "cost_per_1m_out": 0.52,
 801      "cost_per_1m_in_cached": 0,
 802      "cost_per_1m_out_cached": 0,
 803      "context_window": 96000,
 804      "default_max_tokens": 48000,
 805      "can_reason": false,
 806      "supports_attachments": true,
 807      "options": {}
 808    },
 809    {
 810      "id": "inception/mercury",
 811      "name": "Inception: Mercury",
 812      "cost_per_1m_in": 0.25,
 813      "cost_per_1m_out": 1,
 814      "cost_per_1m_in_cached": 0,
 815      "cost_per_1m_out_cached": 0,
 816      "context_window": 128000,
 817      "default_max_tokens": 8192,
 818      "can_reason": false,
 819      "supports_attachments": false,
 820      "options": {}
 821    },
 822    {
 823      "id": "inception/mercury-coder",
 824      "name": "Inception: Mercury Coder",
 825      "cost_per_1m_in": 0.25,
 826      "cost_per_1m_out": 1,
 827      "cost_per_1m_in_cached": 0,
 828      "cost_per_1m_out_cached": 0,
 829      "context_window": 128000,
 830      "default_max_tokens": 8192,
 831      "can_reason": false,
 832      "supports_attachments": false,
 833      "options": {}
 834    },
 835    {
 836      "id": "meituan/longcat-flash-chat:free",
 837      "name": "Meituan: LongCat Flash Chat (free)",
 838      "cost_per_1m_in": 0,
 839      "cost_per_1m_out": 0,
 840      "cost_per_1m_in_cached": 0,
 841      "cost_per_1m_out_cached": 0,
 842      "context_window": 131072,
 843      "default_max_tokens": 65536,
 844      "can_reason": false,
 845      "supports_attachments": false,
 846      "options": {}
 847    },
 848    {
 849      "id": "meta-llama/llama-3.1-405b-instruct",
 850      "name": "Meta: Llama 3.1 405B Instruct",
 851      "cost_per_1m_in": 1,
 852      "cost_per_1m_out": 3,
 853      "cost_per_1m_in_cached": 0,
 854      "cost_per_1m_out_cached": 0,
 855      "context_window": 131072,
 856      "default_max_tokens": 13107,
 857      "can_reason": false,
 858      "supports_attachments": false,
 859      "options": {}
 860    },
 861    {
 862      "id": "meta-llama/llama-3.1-70b-instruct",
 863      "name": "Meta: Llama 3.1 70B Instruct",
  864      "cost_per_1m_in": 0.4,
  865      "cost_per_1m_out": 0.4,
 866      "cost_per_1m_in_cached": 0,
 867      "cost_per_1m_out_cached": 0,
 868      "context_window": 131072,
 869      "default_max_tokens": 8192,
 870      "can_reason": false,
 871      "supports_attachments": false,
 872      "options": {}
 873    },
 874    {
 875      "id": "meta-llama/llama-3.3-70b-instruct",
 876      "name": "Meta: Llama 3.3 70B Instruct",
 877      "cost_per_1m_in": 0.59,
  878      "cost_per_1m_out": 0.79,
 879      "cost_per_1m_in_cached": 0,
 880      "cost_per_1m_out_cached": 0,
 881      "context_window": 131072,
 882      "default_max_tokens": 16384,
 883      "can_reason": false,
 884      "supports_attachments": false,
 885      "options": {}
 886    },
 887    {
 888      "id": "meta-llama/llama-3.3-70b-instruct:free",
 889      "name": "Meta: Llama 3.3 70B Instruct (free)",
 890      "cost_per_1m_in": 0,
 891      "cost_per_1m_out": 0,
 892      "cost_per_1m_in_cached": 0,
 893      "cost_per_1m_out_cached": 0,
 894      "context_window": 131072,
 895      "default_max_tokens": 13107,
 896      "can_reason": false,
 897      "supports_attachments": false,
 898      "options": {}
 899    },
 900    {
 901      "id": "meta-llama/llama-3.3-8b-instruct:free",
 902      "name": "Meta: Llama 3.3 8B Instruct (free)",
 903      "cost_per_1m_in": 0,
 904      "cost_per_1m_out": 0,
 905      "cost_per_1m_in_cached": 0,
 906      "cost_per_1m_out_cached": 0,
 907      "context_window": 128000,
 908      "default_max_tokens": 2014,
 909      "can_reason": false,
 910      "supports_attachments": false,
 911      "options": {}
 912    },
 913    {
 914      "id": "meta-llama/llama-4-maverick",
 915      "name": "Meta: Llama 4 Maverick",
  916      "cost_per_1m_in": 0.17,
 917      "cost_per_1m_out": 0.85,
 918      "cost_per_1m_in_cached": 0,
 919      "cost_per_1m_out_cached": 0,
 920      "context_window": 1048576,
 921      "default_max_tokens": 4096,
 922      "can_reason": false,
 923      "supports_attachments": true,
 924      "options": {}
 925    },
 926    {
 927      "id": "meta-llama/llama-4-maverick:free",
 928      "name": "Meta: Llama 4 Maverick (free)",
 929      "cost_per_1m_in": 0,
 930      "cost_per_1m_out": 0,
 931      "cost_per_1m_in_cached": 0,
 932      "cost_per_1m_out_cached": 0,
 933      "context_window": 128000,
 934      "default_max_tokens": 2014,
 935      "can_reason": false,
 936      "supports_attachments": true,
 937      "options": {}
 938    },
 939    {
 940      "id": "meta-llama/llama-4-scout",
 941      "name": "Meta: Llama 4 Scout",
 942      "cost_per_1m_in": 0.25,
 943      "cost_per_1m_out": 0.7,
 944      "cost_per_1m_in_cached": 0,
 945      "cost_per_1m_out_cached": 0,
 946      "context_window": 1310720,
 947      "default_max_tokens": 4096,
 948      "can_reason": false,
 949      "supports_attachments": true,
 950      "options": {}
 951    },
 952    {
 953      "id": "meta-llama/llama-4-scout:free",
 954      "name": "Meta: Llama 4 Scout (free)",
 955      "cost_per_1m_in": 0,
 956      "cost_per_1m_out": 0,
 957      "cost_per_1m_in_cached": 0,
 958      "cost_per_1m_out_cached": 0,
 959      "context_window": 128000,
 960      "default_max_tokens": 2014,
 961      "can_reason": false,
 962      "supports_attachments": true,
 963      "options": {}
 964    },
 965    {
 966      "id": "microsoft/phi-3-medium-128k-instruct",
 967      "name": "Microsoft: Phi-3 Medium 128K Instruct",
 968      "cost_per_1m_in": 1,
 969      "cost_per_1m_out": 1,
 970      "cost_per_1m_in_cached": 0,
 971      "cost_per_1m_out_cached": 0,
 972      "context_window": 128000,
 973      "default_max_tokens": 12800,
 974      "can_reason": false,
 975      "supports_attachments": false,
 976      "options": {}
 977    },
 978    {
 979      "id": "microsoft/phi-3-mini-128k-instruct",
 980      "name": "Microsoft: Phi-3 Mini 128K Instruct",
  981      "cost_per_1m_in": 0.1,
  982      "cost_per_1m_out": 0.1,
 983      "cost_per_1m_in_cached": 0,
 984      "cost_per_1m_out_cached": 0,
 985      "context_window": 128000,
 986      "default_max_tokens": 12800,
 987      "can_reason": false,
 988      "supports_attachments": false,
 989      "options": {}
 990    },
 991    {
 992      "id": "microsoft/phi-3.5-mini-128k-instruct",
 993      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
 994      "cost_per_1m_in": 0.09999999999999999,
 995      "cost_per_1m_out": 0.09999999999999999,
 996      "cost_per_1m_in_cached": 0,
 997      "cost_per_1m_out_cached": 0,
 998      "context_window": 128000,
 999      "default_max_tokens": 12800,
1000      "can_reason": false,
1001      "supports_attachments": false,
1002      "options": {}
1003    },
1004    {
1005      "id": "minimax/minimax-m2:free",
1006      "name": "MiniMax: MiniMax M2 (free)",
1007      "cost_per_1m_in": 0,
1008      "cost_per_1m_out": 0,
1009      "cost_per_1m_in_cached": 0,
1010      "cost_per_1m_out_cached": 0,
1011      "context_window": 204800,
1012      "default_max_tokens": 65536,
1013      "can_reason": true,
1014      "reasoning_levels": [
1015        "low",
1016        "medium",
1017        "high"
1018      ],
1019      "default_reasoning_effort": "medium",
1020      "supports_attachments": false,
1021      "options": {}
1022    },
1023    {
1024      "id": "mistralai/mistral-large",
1025      "name": "Mistral Large",
1026      "cost_per_1m_in": 2,
1027      "cost_per_1m_out": 6,
1028      "cost_per_1m_in_cached": 0,
1029      "cost_per_1m_out_cached": 0,
1030      "context_window": 128000,
1031      "default_max_tokens": 12800,
1032      "can_reason": false,
1033      "supports_attachments": false,
1034      "options": {}
1035    },
1036    {
1037      "id": "mistralai/mistral-large-2407",
1038      "name": "Mistral Large 2407",
1039      "cost_per_1m_in": 2,
1040      "cost_per_1m_out": 6,
1041      "cost_per_1m_in_cached": 0,
1042      "cost_per_1m_out_cached": 0,
1043      "context_window": 131072,
1044      "default_max_tokens": 13107,
1045      "can_reason": false,
1046      "supports_attachments": false,
1047      "options": {}
1048    },
1049    {
1050      "id": "mistralai/mistral-large-2411",
1051      "name": "Mistral Large 2411",
1052      "cost_per_1m_in": 2,
1053      "cost_per_1m_out": 6,
1054      "cost_per_1m_in_cached": 0,
1055      "cost_per_1m_out_cached": 0,
1056      "context_window": 131072,
1057      "default_max_tokens": 13107,
1058      "can_reason": false,
1059      "supports_attachments": false,
1060      "options": {}
1061    },
1062    {
1063      "id": "mistralai/mistral-small",
1064      "name": "Mistral Small",
1065      "cost_per_1m_in": 0.19999999999999998,
1066      "cost_per_1m_out": 0.6,
1067      "cost_per_1m_in_cached": 0,
1068      "cost_per_1m_out_cached": 0,
1069      "context_window": 32768,
1070      "default_max_tokens": 3276,
1071      "can_reason": false,
1072      "supports_attachments": false,
1073      "options": {}
1074    },
1075    {
1076      "id": "mistralai/mistral-tiny",
1077      "name": "Mistral Tiny",
1078      "cost_per_1m_in": 0.25,
1079      "cost_per_1m_out": 0.25,
1080      "cost_per_1m_in_cached": 0,
1081      "cost_per_1m_out_cached": 0,
1082      "context_window": 32768,
1083      "default_max_tokens": 3276,
1084      "can_reason": false,
1085      "supports_attachments": false,
1086      "options": {}
1087    },
1088    {
1089      "id": "mistralai/codestral-2501",
1090      "name": "Mistral: Codestral 2501",
1091      "cost_per_1m_in": 0.3,
1092      "cost_per_1m_out": 0.8999999999999999,
1093      "cost_per_1m_in_cached": 0,
1094      "cost_per_1m_out_cached": 0,
1095      "context_window": 262144,
1096      "default_max_tokens": 26214,
1097      "can_reason": false,
1098      "supports_attachments": false,
1099      "options": {}
1100    },
1101    {
1102      "id": "mistralai/codestral-2508",
1103      "name": "Mistral: Codestral 2508",
1104      "cost_per_1m_in": 0.3,
1105      "cost_per_1m_out": 0.8999999999999999,
1106      "cost_per_1m_in_cached": 0,
1107      "cost_per_1m_out_cached": 0,
1108      "context_window": 256000,
1109      "default_max_tokens": 25600,
1110      "can_reason": false,
1111      "supports_attachments": false,
1112      "options": {}
1113    },
1114    {
1115      "id": "mistralai/devstral-medium",
1116      "name": "Mistral: Devstral Medium",
1117      "cost_per_1m_in": 0.39999999999999997,
1118      "cost_per_1m_out": 2,
1119      "cost_per_1m_in_cached": 0,
1120      "cost_per_1m_out_cached": 0,
1121      "context_window": 131072,
1122      "default_max_tokens": 13107,
1123      "can_reason": false,
1124      "supports_attachments": false,
1125      "options": {}
1126    },
1127    {
1128      "id": "mistralai/devstral-small-2505",
1129      "name": "Mistral: Devstral Small 2505",
1130      "cost_per_1m_in": 0.049999999999999996,
1131      "cost_per_1m_out": 0.22,
1132      "cost_per_1m_in_cached": 0,
1133      "cost_per_1m_out_cached": 0,
1134      "context_window": 131072,
1135      "default_max_tokens": 65536,
1136      "can_reason": false,
1137      "supports_attachments": false,
1138      "options": {}
1139    },
1140    {
1141      "id": "mistralai/devstral-small-2505:free",
1142      "name": "Mistral: Devstral Small 2505 (free)",
1143      "cost_per_1m_in": 0,
1144      "cost_per_1m_out": 0,
1145      "cost_per_1m_in_cached": 0,
1146      "cost_per_1m_out_cached": 0,
1147      "context_window": 32768,
1148      "default_max_tokens": 3276,
1149      "can_reason": false,
1150      "supports_attachments": false,
1151      "options": {}
1152    },
1153    {
1154      "id": "mistralai/magistral-medium-2506",
1155      "name": "Mistral: Magistral Medium 2506",
1156      "cost_per_1m_in": 2,
1157      "cost_per_1m_out": 5,
1158      "cost_per_1m_in_cached": 0,
1159      "cost_per_1m_out_cached": 0,
1160      "context_window": 40960,
1161      "default_max_tokens": 20000,
1162      "can_reason": true,
1163      "reasoning_levels": [
1164        "low",
1165        "medium",
1166        "high"
1167      ],
1168      "default_reasoning_effort": "medium",
1169      "supports_attachments": false,
1170      "options": {}
1171    },
1172    {
1173      "id": "mistralai/magistral-medium-2506:thinking",
1174      "name": "Mistral: Magistral Medium 2506 (thinking)",
1175      "cost_per_1m_in": 2,
1176      "cost_per_1m_out": 5,
1177      "cost_per_1m_in_cached": 0,
1178      "cost_per_1m_out_cached": 0,
1179      "context_window": 40960,
1180      "default_max_tokens": 20000,
1181      "can_reason": true,
1182      "reasoning_levels": [
1183        "low",
1184        "medium",
1185        "high"
1186      ],
1187      "default_reasoning_effort": "medium",
1188      "supports_attachments": false,
1189      "options": {}
1190    },
1191    {
1192      "id": "mistralai/magistral-small-2506",
1193      "name": "Mistral: Magistral Small 2506",
1194      "cost_per_1m_in": 0.5,
1195      "cost_per_1m_out": 1.5,
1196      "cost_per_1m_in_cached": 0,
1197      "cost_per_1m_out_cached": 0,
1198      "context_window": 40000,
1199      "default_max_tokens": 20000,
1200      "can_reason": true,
1201      "reasoning_levels": [
1202        "low",
1203        "medium",
1204        "high"
1205      ],
1206      "default_reasoning_effort": "medium",
1207      "supports_attachments": false,
1208      "options": {}
1209    },
1210    {
1211      "id": "mistralai/ministral-8b",
1212      "name": "Mistral: Ministral 8B",
1213      "cost_per_1m_in": 0.09999999999999999,
1214      "cost_per_1m_out": 0.09999999999999999,
1215      "cost_per_1m_in_cached": 0,
1216      "cost_per_1m_out_cached": 0,
1217      "context_window": 131072,
1218      "default_max_tokens": 13107,
1219      "can_reason": false,
1220      "supports_attachments": false,
1221      "options": {}
1222    },
1223    {
1224      "id": "mistralai/mistral-7b-instruct",
1225      "name": "Mistral: Mistral 7B Instruct",
1226      "cost_per_1m_in": 0.028,
1227      "cost_per_1m_out": 0.054,
1228      "cost_per_1m_in_cached": 0,
1229      "cost_per_1m_out_cached": 0,
1230      "context_window": 32768,
1231      "default_max_tokens": 8192,
1232      "can_reason": false,
1233      "supports_attachments": false,
1234      "options": {}
1235    },
1236    {
1237      "id": "mistralai/mistral-7b-instruct:free",
1238      "name": "Mistral: Mistral 7B Instruct (free)",
1239      "cost_per_1m_in": 0,
1240      "cost_per_1m_out": 0,
1241      "cost_per_1m_in_cached": 0,
1242      "cost_per_1m_out_cached": 0,
1243      "context_window": 32768,
1244      "default_max_tokens": 8192,
1245      "can_reason": false,
1246      "supports_attachments": false,
1247      "options": {}
1248    },
1249    {
1250      "id": "mistralai/mistral-7b-instruct-v0.3",
1251      "name": "Mistral: Mistral 7B Instruct v0.3",
1252      "cost_per_1m_in": 0.028,
1253      "cost_per_1m_out": 0.054,
1254      "cost_per_1m_in_cached": 0,
1255      "cost_per_1m_out_cached": 0,
1256      "context_window": 32768,
1257      "default_max_tokens": 8192,
1258      "can_reason": false,
1259      "supports_attachments": false,
1260      "options": {}
1261    },
1262    {
1263      "id": "mistralai/mistral-medium-3",
1264      "name": "Mistral: Mistral Medium 3",
1265      "cost_per_1m_in": 0.39999999999999997,
1266      "cost_per_1m_out": 2,
1267      "cost_per_1m_in_cached": 0,
1268      "cost_per_1m_out_cached": 0,
1269      "context_window": 131072,
1270      "default_max_tokens": 13107,
1271      "can_reason": false,
1272      "supports_attachments": true,
1273      "options": {}
1274    },
1275    {
1276      "id": "mistralai/mistral-medium-3.1",
1277      "name": "Mistral: Mistral Medium 3.1",
1278      "cost_per_1m_in": 0.39999999999999997,
1279      "cost_per_1m_out": 2,
1280      "cost_per_1m_in_cached": 0,
1281      "cost_per_1m_out_cached": 0,
1282      "context_window": 131072,
1283      "default_max_tokens": 13107,
1284      "can_reason": false,
1285      "supports_attachments": true,
1286      "options": {}
1287    },
1288    {
1289      "id": "mistralai/mistral-nemo",
1290      "name": "Mistral: Mistral Nemo",
1291      "cost_per_1m_in": 0.15,
1292      "cost_per_1m_out": 0.15,
1293      "cost_per_1m_in_cached": 0,
1294      "cost_per_1m_out_cached": 0,
1295      "context_window": 131072,
1296      "default_max_tokens": 13107,
1297      "can_reason": false,
1298      "supports_attachments": false,
1299      "options": {}
1300    },
1301    {
1302      "id": "mistralai/mistral-small-24b-instruct-2501",
1303      "name": "Mistral: Mistral Small 3",
1304      "cost_per_1m_in": 0.09999999999999999,
1305      "cost_per_1m_out": 0.3,
1306      "cost_per_1m_in_cached": 0,
1307      "cost_per_1m_out_cached": 0,
1308      "context_window": 32768,
1309      "default_max_tokens": 3276,
1310      "can_reason": false,
1311      "supports_attachments": false,
1312      "options": {}
1313    },
1314    {
1315      "id": "mistralai/mistral-small-3.1-24b-instruct",
1316      "name": "Mistral: Mistral Small 3.1 24B",
1317      "cost_per_1m_in": 0.09999999999999999,
1318      "cost_per_1m_out": 0.3,
1319      "cost_per_1m_in_cached": 0,
1320      "cost_per_1m_out_cached": 0,
1321      "context_window": 131072,
1322      "default_max_tokens": 13107,
1323      "can_reason": false,
1324      "supports_attachments": true,
1325      "options": {}
1326    },
1327    {
1328      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
1329      "name": "Mistral: Mistral Small 3.1 24B (free)",
1330      "cost_per_1m_in": 0,
1331      "cost_per_1m_out": 0,
1332      "cost_per_1m_in_cached": 0,
1333      "cost_per_1m_out_cached": 0,
1334      "context_window": 96000,
1335      "default_max_tokens": 48000,
1336      "can_reason": false,
1337      "supports_attachments": true,
1338      "options": {}
1339    },
1340    {
1341      "id": "mistralai/mistral-small-3.2-24b-instruct",
1342      "name": "Mistral: Mistral Small 3.2 24B",
1343      "cost_per_1m_in": 0.06,
1344      "cost_per_1m_out": 0.18,
1345      "cost_per_1m_in_cached": 0,
1346      "cost_per_1m_out_cached": 0,
1347      "context_window": 131072,
1348      "default_max_tokens": 65536,
1349      "can_reason": false,
1350      "supports_attachments": true,
1351      "options": {}
1352    },
1353    {
1354      "id": "mistralai/mistral-small-3.2-24b-instruct:free",
1355      "name": "Mistral: Mistral Small 3.2 24B (free)",
1356      "cost_per_1m_in": 0,
1357      "cost_per_1m_out": 0,
1358      "cost_per_1m_in_cached": 0,
1359      "cost_per_1m_out_cached": 0,
1360      "context_window": 131072,
1361      "default_max_tokens": 13107,
1362      "can_reason": false,
1363      "supports_attachments": true,
1364      "options": {}
1365    },
1366    {
1367      "id": "mistralai/mixtral-8x22b-instruct",
1368      "name": "Mistral: Mixtral 8x22B Instruct",
1369      "cost_per_1m_in": 2,
1370      "cost_per_1m_out": 6,
1371      "cost_per_1m_in_cached": 0,
1372      "cost_per_1m_out_cached": 0,
1373      "context_window": 65536,
1374      "default_max_tokens": 6553,
1375      "can_reason": false,
1376      "supports_attachments": false,
1377      "options": {}
1378    },
1379    {
1380      "id": "mistralai/mixtral-8x7b-instruct",
1381      "name": "Mistral: Mixtral 8x7B Instruct",
1382      "cost_per_1m_in": 0.54,
1383      "cost_per_1m_out": 0.54,
1384      "cost_per_1m_in_cached": 0,
1385      "cost_per_1m_out_cached": 0,
1386      "context_window": 32768,
1387      "default_max_tokens": 8192,
1388      "can_reason": false,
1389      "supports_attachments": false,
1390      "options": {}
1391    },
1392    {
1393      "id": "mistralai/pixtral-12b",
1394      "name": "Mistral: Pixtral 12B",
1395      "cost_per_1m_in": 0.15,
1396      "cost_per_1m_out": 0.15,
1397      "cost_per_1m_in_cached": 0,
1398      "cost_per_1m_out_cached": 0,
1399      "context_window": 131072,
1400      "default_max_tokens": 13107,
1401      "can_reason": false,
1402      "supports_attachments": true,
1403      "options": {}
1404    },
1405    {
1406      "id": "mistralai/pixtral-large-2411",
1407      "name": "Mistral: Pixtral Large 2411",
1408      "cost_per_1m_in": 2,
1409      "cost_per_1m_out": 6,
1410      "cost_per_1m_in_cached": 0,
1411      "cost_per_1m_out_cached": 0,
1412      "context_window": 131072,
1413      "default_max_tokens": 13107,
1414      "can_reason": false,
1415      "supports_attachments": true,
1416      "options": {}
1417    },
1418    {
1419      "id": "mistralai/mistral-saba",
1420      "name": "Mistral: Saba",
1421      "cost_per_1m_in": 0.19999999999999998,
1422      "cost_per_1m_out": 0.6,
1423      "cost_per_1m_in_cached": 0,
1424      "cost_per_1m_out_cached": 0,
1425      "context_window": 32768,
1426      "default_max_tokens": 3276,
1427      "can_reason": false,
1428      "supports_attachments": false,
1429      "options": {}
1430    },
1431    {
1432      "id": "moonshotai/kimi-k2",
1433      "name": "MoonshotAI: Kimi K2 0711",
1434      "cost_per_1m_in": 0.5,
1435      "cost_per_1m_out": 2.4,
1436      "cost_per_1m_in_cached": 0,
1437      "cost_per_1m_out_cached": 0,
1438      "context_window": 131072,
1439      "default_max_tokens": 13107,
1440      "can_reason": false,
1441      "supports_attachments": false,
1442      "options": {}
1443    },
1444    {
1445      "id": "moonshotai/kimi-k2-0905",
1446      "name": "MoonshotAI: Kimi K2 0905",
1447      "cost_per_1m_in": 0.5,
1448      "cost_per_1m_out": 2,
1449      "cost_per_1m_in_cached": 0,
1450      "cost_per_1m_out_cached": 0,
1451      "context_window": 262144,
1452      "default_max_tokens": 26214,
1453      "can_reason": false,
1454      "supports_attachments": false,
1455      "options": {}
1456    },
1457    {
1458      "id": "moonshotai/kimi-k2-0905:exacto",
1459      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
1460      "cost_per_1m_in": 1,
1461      "cost_per_1m_out": 3,
1462      "cost_per_1m_in_cached": 0,
1463      "cost_per_1m_out_cached": 0,
1464      "context_window": 262144,
1465      "default_max_tokens": 8192,
1466      "can_reason": false,
1467      "supports_attachments": false,
1468      "options": {}
1469    },
1470    {
1471      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1472      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1473      "cost_per_1m_in": 0.6,
1474      "cost_per_1m_out": 0.6,
1475      "cost_per_1m_in_cached": 0,
1476      "cost_per_1m_out_cached": 0,
1477      "context_window": 131072,
1478      "default_max_tokens": 8192,
1479      "can_reason": false,
1480      "supports_attachments": false,
1481      "options": {}
1482    },
1483    {
1484      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1485      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1486      "cost_per_1m_in": 0.09999999999999999,
1487      "cost_per_1m_out": 0.39999999999999997,
1488      "cost_per_1m_in_cached": 0,
1489      "cost_per_1m_out_cached": 0,
1490      "context_window": 131072,
1491      "default_max_tokens": 13107,
1492      "can_reason": true,
1493      "reasoning_levels": [
1494        "low",
1495        "medium",
1496        "high"
1497      ],
1498      "default_reasoning_effort": "medium",
1499      "supports_attachments": false,
1500      "options": {}
1501    },
1502    {
1503      "id": "nvidia/nemotron-nano-9b-v2",
1504      "name": "NVIDIA: Nemotron Nano 9B V2",
1505      "cost_per_1m_in": 0.04,
1506      "cost_per_1m_out": 0.16,
1507      "cost_per_1m_in_cached": 0,
1508      "cost_per_1m_out_cached": 0,
1509      "context_window": 131072,
1510      "default_max_tokens": 13107,
1511      "can_reason": true,
1512      "reasoning_levels": [
1513        "low",
1514        "medium",
1515        "high"
1516      ],
1517      "default_reasoning_effort": "medium",
1518      "supports_attachments": false,
1519      "options": {}
1520    },
1521    {
1522      "id": "nvidia/nemotron-nano-9b-v2:free",
1523      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1524      "cost_per_1m_in": 0,
1525      "cost_per_1m_out": 0,
1526      "cost_per_1m_in_cached": 0,
1527      "cost_per_1m_out_cached": 0,
1528      "context_window": 128000,
1529      "default_max_tokens": 12800,
1530      "can_reason": true,
1531      "reasoning_levels": [
1532        "low",
1533        "medium",
1534        "high"
1535      ],
1536      "default_reasoning_effort": "medium",
1537      "supports_attachments": false,
1538      "options": {}
1539    },
1540    {
1541      "id": "nousresearch/deephermes-3-llama-3-8b-preview",
1542      "name": "Nous: DeepHermes 3 Llama 3 8B Preview",
1543      "cost_per_1m_in": 0.03,
1544      "cost_per_1m_out": 0.11,
1545      "cost_per_1m_in_cached": 0,
1546      "cost_per_1m_out_cached": 0,
1547      "context_window": 131072,
1548      "default_max_tokens": 65536,
1549      "can_reason": false,
1550      "supports_attachments": false,
1551      "options": {}
1552    },
1553    {
1554      "id": "nousresearch/deephermes-3-mistral-24b-preview",
1555      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
1556      "cost_per_1m_in": 0.15,
1557      "cost_per_1m_out": 0.59,
1558      "cost_per_1m_in_cached": 0,
1559      "cost_per_1m_out_cached": 0,
1560      "context_window": 32768,
1561      "default_max_tokens": 16384,
1562      "can_reason": true,
1563      "reasoning_levels": [
1564        "low",
1565        "medium",
1566        "high"
1567      ],
1568      "default_reasoning_effort": "medium",
1569      "supports_attachments": false,
1570      "options": {}
1571    },
1572    {
1573      "id": "nousresearch/hermes-3-llama-3.1-70b",
1574      "name": "Nous: Hermes 3 70B Instruct",
1575      "cost_per_1m_in": 0.39999999999999997,
1576      "cost_per_1m_out": 0.39999999999999997,
1577      "cost_per_1m_in_cached": 0,
1578      "cost_per_1m_out_cached": 0,
1579      "context_window": 12288,
1580      "default_max_tokens": 1228,
1581      "can_reason": false,
1582      "supports_attachments": false,
1583      "options": {}
1584    },
1585    {
1586      "id": "nousresearch/hermes-4-405b",
1587      "name": "Nous: Hermes 4 405B",
1588      "cost_per_1m_in": 0.3,
1589      "cost_per_1m_out": 1.2,
1590      "cost_per_1m_in_cached": 0,
1591      "cost_per_1m_out_cached": 0,
1592      "context_window": 131072,
1593      "default_max_tokens": 65536,
1594      "can_reason": true,
1595      "reasoning_levels": [
1596        "low",
1597        "medium",
1598        "high"
1599      ],
1600      "default_reasoning_effort": "medium",
1601      "supports_attachments": false,
1602      "options": {}
1603    },
1604    {
1605      "id": "nousresearch/hermes-4-70b",
1606      "name": "Nous: Hermes 4 70B",
1607      "cost_per_1m_in": 0.11,
1608      "cost_per_1m_out": 0.38,
1609      "cost_per_1m_in_cached": 0,
1610      "cost_per_1m_out_cached": 0,
1611      "context_window": 131072,
1612      "default_max_tokens": 65536,
1613      "can_reason": true,
1614      "reasoning_levels": [
1615        "low",
1616        "medium",
1617        "high"
1618      ],
1619      "default_reasoning_effort": "medium",
1620      "supports_attachments": false,
1621      "options": {}
1622    },
1623    {
1624      "id": "openai/codex-mini",
1625      "name": "OpenAI: Codex Mini",
1626      "cost_per_1m_in": 1.5,
1627      "cost_per_1m_out": 6,
1628      "cost_per_1m_in_cached": 0,
1629      "cost_per_1m_out_cached": 0.375,
1630      "context_window": 200000,
1631      "default_max_tokens": 50000,
1632      "can_reason": true,
1633      "reasoning_levels": [
1634        "low",
1635        "medium",
1636        "high"
1637      ],
1638      "default_reasoning_effort": "medium",
1639      "supports_attachments": true,
1640      "options": {}
1641    },
1642    {
1643      "id": "openai/gpt-4-turbo",
1644      "name": "OpenAI: GPT-4 Turbo",
1645      "cost_per_1m_in": 10,
1646      "cost_per_1m_out": 30,
1647      "cost_per_1m_in_cached": 0,
1648      "cost_per_1m_out_cached": 0,
1649      "context_window": 128000,
1650      "default_max_tokens": 2048,
1651      "can_reason": false,
1652      "supports_attachments": true,
1653      "options": {}
1654    },
1655    {
1656      "id": "openai/gpt-4-1106-preview",
1657      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1658      "cost_per_1m_in": 10,
1659      "cost_per_1m_out": 30,
1660      "cost_per_1m_in_cached": 0,
1661      "cost_per_1m_out_cached": 0,
1662      "context_window": 128000,
1663      "default_max_tokens": 2048,
1664      "can_reason": false,
1665      "supports_attachments": false,
1666      "options": {}
1667    },
1668    {
1669      "id": "openai/gpt-4-turbo-preview",
1670      "name": "OpenAI: GPT-4 Turbo Preview",
1671      "cost_per_1m_in": 10,
1672      "cost_per_1m_out": 30,
1673      "cost_per_1m_in_cached": 0,
1674      "cost_per_1m_out_cached": 0,
1675      "context_window": 128000,
1676      "default_max_tokens": 2048,
1677      "can_reason": false,
1678      "supports_attachments": false,
1679      "options": {}
1680    },
1681    {
1682      "id": "openai/gpt-4.1",
1683      "name": "OpenAI: GPT-4.1",
1684      "cost_per_1m_in": 2,
1685      "cost_per_1m_out": 8,
1686      "cost_per_1m_in_cached": 0,
1687      "cost_per_1m_out_cached": 0.5,
1688      "context_window": 1047576,
1689      "default_max_tokens": 104757,
1690      "can_reason": false,
1691      "supports_attachments": true,
1692      "options": {}
1693    },
1694    {
1695      "id": "openai/gpt-4.1-mini",
1696      "name": "OpenAI: GPT-4.1 Mini",
1697      "cost_per_1m_in": 0.39999999999999997,
1698      "cost_per_1m_out": 1.5999999999999999,
1699      "cost_per_1m_in_cached": 0,
1700      "cost_per_1m_out_cached": 0.09999999999999999,
1701      "context_window": 1047576,
1702      "default_max_tokens": 104757,
1703      "can_reason": false,
1704      "supports_attachments": true,
1705      "options": {}
1706    },
1707    {
1708      "id": "openai/gpt-4.1-nano",
1709      "name": "OpenAI: GPT-4.1 Nano",
1710      "cost_per_1m_in": 0.09999999999999999,
1711      "cost_per_1m_out": 0.39999999999999997,
1712      "cost_per_1m_in_cached": 0,
1713      "cost_per_1m_out_cached": 0.03,
1714      "context_window": 1047576,
1715      "default_max_tokens": 104757,
1716      "can_reason": false,
1717      "supports_attachments": true,
1718      "options": {}
1719    },
1720    {
1721      "id": "openai/gpt-4o",
1722      "name": "OpenAI: GPT-4o",
1723      "cost_per_1m_in": 2.5,
1724      "cost_per_1m_out": 10,
1725      "cost_per_1m_in_cached": 0,
1726      "cost_per_1m_out_cached": 0,
1727      "context_window": 128000,
1728      "default_max_tokens": 8192,
1729      "can_reason": false,
1730      "supports_attachments": true,
1731      "options": {}
1732    },
1733    {
1734      "id": "openai/gpt-4o-2024-05-13",
1735      "name": "OpenAI: GPT-4o (2024-05-13)",
1736      "cost_per_1m_in": 5,
1737      "cost_per_1m_out": 15,
1738      "cost_per_1m_in_cached": 0,
1739      "cost_per_1m_out_cached": 0,
1740      "context_window": 128000,
1741      "default_max_tokens": 2048,
1742      "can_reason": false,
1743      "supports_attachments": true,
1744      "options": {}
1745    },
1746    {
1747      "id": "openai/gpt-4o-2024-08-06",
1748      "name": "OpenAI: GPT-4o (2024-08-06)",
1749      "cost_per_1m_in": 2.5,
1750      "cost_per_1m_out": 10,
1751      "cost_per_1m_in_cached": 0,
1752      "cost_per_1m_out_cached": 1.25,
1753      "context_window": 128000,
1754      "default_max_tokens": 8192,
1755      "can_reason": false,
1756      "supports_attachments": true,
1757      "options": {}
1758    },
1759    {
1760      "id": "openai/gpt-4o-2024-11-20",
1761      "name": "OpenAI: GPT-4o (2024-11-20)",
1762      "cost_per_1m_in": 2.5,
1763      "cost_per_1m_out": 10,
1764      "cost_per_1m_in_cached": 0,
1765      "cost_per_1m_out_cached": 1.25,
1766      "context_window": 128000,
1767      "default_max_tokens": 8192,
1768      "can_reason": false,
1769      "supports_attachments": true,
1770      "options": {}
1771    },
1772    {
1773      "id": "openai/gpt-4o:extended",
1774      "name": "OpenAI: GPT-4o (extended)",
1775      "cost_per_1m_in": 6,
1776      "cost_per_1m_out": 18,
1777      "cost_per_1m_in_cached": 0,
1778      "cost_per_1m_out_cached": 0,
1779      "context_window": 128000,
1780      "default_max_tokens": 32000,
1781      "can_reason": false,
1782      "supports_attachments": true,
1783      "options": {}
1784    },
1785    {
1786      "id": "openai/gpt-4o-audio-preview",
1787      "name": "OpenAI: GPT-4o Audio",
1788      "cost_per_1m_in": 2.5,
1789      "cost_per_1m_out": 10,
1790      "cost_per_1m_in_cached": 0,
1791      "cost_per_1m_out_cached": 0,
1792      "context_window": 128000,
1793      "default_max_tokens": 8192,
1794      "can_reason": false,
1795      "supports_attachments": false,
1796      "options": {}
1797    },
1798    {
1799      "id": "openai/gpt-4o-mini",
1800      "name": "OpenAI: GPT-4o-mini",
1801      "cost_per_1m_in": 0.15,
1802      "cost_per_1m_out": 0.6,
1803      "cost_per_1m_in_cached": 0,
1804      "cost_per_1m_out_cached": 0.075,
1805      "context_window": 128000,
1806      "default_max_tokens": 8192,
1807      "can_reason": false,
1808      "supports_attachments": true,
1809      "options": {}
1810    },
1811    {
1812      "id": "openai/gpt-4o-mini-2024-07-18",
1813      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1814      "cost_per_1m_in": 0.15,
1815      "cost_per_1m_out": 0.6,
1816      "cost_per_1m_in_cached": 0,
1817      "cost_per_1m_out_cached": 0.075,
1818      "context_window": 128000,
1819      "default_max_tokens": 8192,
1820      "can_reason": false,
1821      "supports_attachments": true,
1822      "options": {}
1823    },
1824    {
1825      "id": "openai/gpt-5",
1826      "name": "OpenAI: GPT-5",
1827      "cost_per_1m_in": 1.25,
1828      "cost_per_1m_out": 10,
1829      "cost_per_1m_in_cached": 0,
1830      "cost_per_1m_out_cached": 0.125,
1831      "context_window": 400000,
1832      "default_max_tokens": 64000,
1833      "can_reason": true,
1834      "reasoning_levels": [
1835        "low",
1836        "medium",
1837        "high"
1838      ],
1839      "default_reasoning_effort": "medium",
1840      "supports_attachments": true,
1841      "options": {}
1842    },
1843    {
1844      "id": "openai/gpt-5-codex",
1845      "name": "OpenAI: GPT-5 Codex",
1846      "cost_per_1m_in": 1.25,
1847      "cost_per_1m_out": 10,
1848      "cost_per_1m_in_cached": 0,
1849      "cost_per_1m_out_cached": 0.125,
1850      "context_window": 400000,
1851      "default_max_tokens": 64000,
1852      "can_reason": true,
1853      "reasoning_levels": [
1854        "low",
1855        "medium",
1856        "high"
1857      ],
1858      "default_reasoning_effort": "medium",
1859      "supports_attachments": true,
1860      "options": {}
1861    },
1862    {
1863      "id": "openai/gpt-5-image",
1864      "name": "OpenAI: GPT-5 Image",
1865      "cost_per_1m_in": 10,
1866      "cost_per_1m_out": 10,
1867      "cost_per_1m_in_cached": 0,
1868      "cost_per_1m_out_cached": 1.25,
1869      "context_window": 400000,
1870      "default_max_tokens": 64000,
1871      "can_reason": true,
1872      "reasoning_levels": [
1873        "low",
1874        "medium",
1875        "high"
1876      ],
1877      "default_reasoning_effort": "medium",
1878      "supports_attachments": true,
1879      "options": {}
1880    },
1881    {
1882      "id": "openai/gpt-5-image-mini",
1883      "name": "OpenAI: GPT-5 Image Mini",
1884      "cost_per_1m_in": 2.5,
1885      "cost_per_1m_out": 2,
1886      "cost_per_1m_in_cached": 0,
1887      "cost_per_1m_out_cached": 0.25,
1888      "context_window": 400000,
1889      "default_max_tokens": 64000,
1890      "can_reason": true,
1891      "reasoning_levels": [
1892        "low",
1893        "medium",
1894        "high"
1895      ],
1896      "default_reasoning_effort": "medium",
1897      "supports_attachments": true,
1898      "options": {}
1899    },
1900    {
1901      "id": "openai/gpt-5-mini",
1902      "name": "OpenAI: GPT-5 Mini",
1903      "cost_per_1m_in": 0.25,
1904      "cost_per_1m_out": 2,
1905      "cost_per_1m_in_cached": 0,
1906      "cost_per_1m_out_cached": 0.03,
1907      "context_window": 400000,
1908      "default_max_tokens": 40000,
1909      "can_reason": true,
1910      "reasoning_levels": [
1911        "low",
1912        "medium",
1913        "high"
1914      ],
1915      "default_reasoning_effort": "medium",
1916      "supports_attachments": true,
1917      "options": {}
1918    },
1919    {
1920      "id": "openai/gpt-5-nano",
1921      "name": "OpenAI: GPT-5 Nano",
1922      "cost_per_1m_in": 0.049999999999999996,
1923      "cost_per_1m_out": 0.39999999999999997,
1924      "cost_per_1m_in_cached": 0,
1925      "cost_per_1m_out_cached": 0.005,
1926      "context_window": 400000,
1927      "default_max_tokens": 64000,
1928      "can_reason": true,
1929      "reasoning_levels": [
1930        "low",
1931        "medium",
1932        "high"
1933      ],
1934      "default_reasoning_effort": "medium",
1935      "supports_attachments": true,
1936      "options": {}
1937    },
1938    {
1939      "id": "openai/gpt-5-pro",
1940      "name": "OpenAI: GPT-5 Pro",
1941      "cost_per_1m_in": 15,
1942      "cost_per_1m_out": 120,
1943      "cost_per_1m_in_cached": 0,
1944      "cost_per_1m_out_cached": 0,
1945      "context_window": 400000,
1946      "default_max_tokens": 64000,
1947      "can_reason": true,
1948      "reasoning_levels": [
1949        "low",
1950        "medium",
1951        "high"
1952      ],
1953      "default_reasoning_effort": "medium",
1954      "supports_attachments": true,
1955      "options": {}
1956    },
1957    {
1958      "id": "openai/gpt-oss-120b",
1959      "name": "OpenAI: gpt-oss-120b",
1960      "cost_per_1m_in": 0.14,
1961      "cost_per_1m_out": 0.95,
1962      "cost_per_1m_in_cached": 0,
1963      "cost_per_1m_out_cached": 0,
1964      "context_window": 131072,
1965      "default_max_tokens": 65536,
1966      "can_reason": true,
1967      "reasoning_levels": [
1968        "low",
1969        "medium",
1970        "high"
1971      ],
1972      "default_reasoning_effort": "medium",
1973      "supports_attachments": false,
1974      "options": {}
1975    },
1976    {
1977      "id": "openai/gpt-oss-120b:exacto",
1978      "name": "OpenAI: gpt-oss-120b (exacto)",
1979      "cost_per_1m_in": 0.049999999999999996,
1980      "cost_per_1m_out": 0.24,
1981      "cost_per_1m_in_cached": 0,
1982      "cost_per_1m_out_cached": 0,
1983      "context_window": 131072,
1984      "default_max_tokens": 13107,
1985      "can_reason": true,
1986      "reasoning_levels": [
1987        "low",
1988        "medium",
1989        "high"
1990      ],
1991      "default_reasoning_effort": "medium",
1992      "supports_attachments": false,
1993      "options": {}
1994    },
1995    {
1996      "id": "openai/gpt-oss-20b",
1997      "name": "OpenAI: gpt-oss-20b",
1998      "cost_per_1m_in": 0.04,
1999      "cost_per_1m_out": 0.15,
2000      "cost_per_1m_in_cached": 0,
2001      "cost_per_1m_out_cached": 0,
2002      "context_window": 131072,
2003      "default_max_tokens": 13107,
2004      "can_reason": true,
2005      "reasoning_levels": [
2006        "low",
2007        "medium",
2008        "high"
2009      ],
2010      "default_reasoning_effort": "medium",
2011      "supports_attachments": false,
2012      "options": {}
2013    },
2014    {
2015      "id": "openai/gpt-oss-20b:free",
2016      "name": "OpenAI: gpt-oss-20b (free)",
2017      "cost_per_1m_in": 0,
2018      "cost_per_1m_out": 0,
2019      "cost_per_1m_in_cached": 0,
2020      "cost_per_1m_out_cached": 0,
2021      "context_window": 131072,
2022      "default_max_tokens": 65536,
2023      "can_reason": true,
2024      "reasoning_levels": [
2025        "low",
2026        "medium",
2027        "high"
2028      ],
2029      "default_reasoning_effort": "medium",
2030      "supports_attachments": false,
2031      "options": {}
2032    },
2033    {
2034      "id": "openai/o1",
2035      "name": "OpenAI: o1",
2036      "cost_per_1m_in": 15,
2037      "cost_per_1m_out": 60,
2038      "cost_per_1m_in_cached": 0,
2039      "cost_per_1m_out_cached": 7.5,
2040      "context_window": 200000,
2041      "default_max_tokens": 50000,
2042      "can_reason": false,
2043      "supports_attachments": true,
2044      "options": {}
2045    },
2046    {
2047      "id": "openai/o3",
2048      "name": "OpenAI: o3",
2049      "cost_per_1m_in": 2,
2050      "cost_per_1m_out": 8,
2051      "cost_per_1m_in_cached": 0,
2052      "cost_per_1m_out_cached": 0.5,
2053      "context_window": 200000,
2054      "default_max_tokens": 50000,
2055      "can_reason": true,
2056      "reasoning_levels": [
2057        "low",
2058        "medium",
2059        "high"
2060      ],
2061      "default_reasoning_effort": "medium",
2062      "supports_attachments": true,
2063      "options": {}
2064    },
2065    {
2066      "id": "openai/o3-deep-research",
2067      "name": "OpenAI: o3 Deep Research",
2068      "cost_per_1m_in": 10,
2069      "cost_per_1m_out": 40,
2070      "cost_per_1m_in_cached": 0,
2071      "cost_per_1m_out_cached": 2.5,
2072      "context_window": 200000,
2073      "default_max_tokens": 50000,
2074      "can_reason": true,
2075      "reasoning_levels": [
2076        "low",
2077        "medium",
2078        "high"
2079      ],
2080      "default_reasoning_effort": "medium",
2081      "supports_attachments": true,
2082      "options": {}
2083    },
2084    {
2085      "id": "openai/o3-mini",
2086      "name": "OpenAI: o3 Mini",
2087      "cost_per_1m_in": 1.1,
2088      "cost_per_1m_out": 4.4,
2089      "cost_per_1m_in_cached": 0,
2090      "cost_per_1m_out_cached": 0.55,
2091      "context_window": 200000,
2092      "default_max_tokens": 50000,
2093      "can_reason": false,
2094      "supports_attachments": false,
2095      "options": {}
2096    },
2097    {
2098      "id": "openai/o3-mini-high",
2099      "name": "OpenAI: o3 Mini High",
2100      "cost_per_1m_in": 1.1,
2101      "cost_per_1m_out": 4.4,
2102      "cost_per_1m_in_cached": 0,
2103      "cost_per_1m_out_cached": 0.55,
2104      "context_window": 200000,
2105      "default_max_tokens": 50000,
2106      "can_reason": false,
2107      "supports_attachments": false,
2108      "options": {}
2109    },
2110    {
2111      "id": "openai/o3-pro",
2112      "name": "OpenAI: o3 Pro",
2113      "cost_per_1m_in": 20,
2114      "cost_per_1m_out": 80,
2115      "cost_per_1m_in_cached": 0,
2116      "cost_per_1m_out_cached": 0,
2117      "context_window": 200000,
2118      "default_max_tokens": 50000,
2119      "can_reason": true,
2120      "reasoning_levels": [
2121        "low",
2122        "medium",
2123        "high"
2124      ],
2125      "default_reasoning_effort": "medium",
2126      "supports_attachments": true,
2127      "options": {}
2128    },
2129    {
2130      "id": "openai/o4-mini",
2131      "name": "OpenAI: o4 Mini",
2132      "cost_per_1m_in": 1.1,
2133      "cost_per_1m_out": 4.4,
2134      "cost_per_1m_in_cached": 0,
2135      "cost_per_1m_out_cached": 0.275,
2136      "context_window": 200000,
2137      "default_max_tokens": 50000,
2138      "can_reason": true,
2139      "reasoning_levels": [
2140        "low",
2141        "medium",
2142        "high"
2143      ],
2144      "default_reasoning_effort": "medium",
2145      "supports_attachments": true,
2146      "options": {}
2147    },
2148    {
2149      "id": "openai/o4-mini-deep-research",
2150      "name": "OpenAI: o4 Mini Deep Research",
2151      "cost_per_1m_in": 2,
2152      "cost_per_1m_out": 8,
2153      "cost_per_1m_in_cached": 0,
2154      "cost_per_1m_out_cached": 0.5,
2155      "context_window": 200000,
2156      "default_max_tokens": 50000,
2157      "can_reason": true,
2158      "reasoning_levels": [
2159        "low",
2160        "medium",
2161        "high"
2162      ],
2163      "default_reasoning_effort": "medium",
2164      "supports_attachments": true,
2165      "options": {}
2166    },
2167    {
2168      "id": "openai/o4-mini-high",
2169      "name": "OpenAI: o4 Mini High",
2170      "cost_per_1m_in": 1.1,
2171      "cost_per_1m_out": 4.4,
2172      "cost_per_1m_in_cached": 0,
2173      "cost_per_1m_out_cached": 0.275,
2174      "context_window": 200000,
2175      "default_max_tokens": 50000,
2176      "can_reason": true,
2177      "reasoning_levels": [
2178        "low",
2179        "medium",
2180        "high"
2181      ],
2182      "default_reasoning_effort": "medium",
2183      "supports_attachments": true,
2184      "options": {}
2185    },
2186    {
2187      "id": "qwen/qwen-2.5-72b-instruct",
2188      "name": "Qwen2.5 72B Instruct",
2189      "cost_per_1m_in": 0.13,
2190      "cost_per_1m_out": 0.39999999999999997,
2191      "cost_per_1m_in_cached": 0,
2192      "cost_per_1m_out_cached": 0,
2193      "context_window": 131072,
2194      "default_max_tokens": 13107,
2195      "can_reason": false,
2196      "supports_attachments": false,
2197      "options": {}
2198    },
2199    {
2200      "id": "qwen/qwq-32b",
2201      "name": "Qwen: QwQ 32B",
2202      "cost_per_1m_in": 0.15,
2203      "cost_per_1m_out": 0.58,
2204      "cost_per_1m_in_cached": 0,
2205      "cost_per_1m_out_cached": 0,
2206      "context_window": 131072,
2207      "default_max_tokens": 65536,
2208      "can_reason": true,
2209      "reasoning_levels": [
2210        "low",
2211        "medium",
2212        "high"
2213      ],
2214      "default_reasoning_effort": "medium",
2215      "supports_attachments": false,
2216      "options": {}
2217    },
2218    {
2219      "id": "qwen/qwen-plus-2025-07-28",
2220      "name": "Qwen: Qwen Plus 0728",
2221      "cost_per_1m_in": 0.39999999999999997,
2222      "cost_per_1m_out": 1.2,
2223      "cost_per_1m_in_cached": 0,
2224      "cost_per_1m_out_cached": 0,
2225      "context_window": 1000000,
2226      "default_max_tokens": 16384,
2227      "can_reason": false,
2228      "supports_attachments": false,
2229      "options": {}
2230    },
2231    {
2232      "id": "qwen/qwen-plus-2025-07-28:thinking",
2233      "name": "Qwen: Qwen Plus 0728 (thinking)",
2234      "cost_per_1m_in": 0.39999999999999997,
2235      "cost_per_1m_out": 4,
2236      "cost_per_1m_in_cached": 0,
2237      "cost_per_1m_out_cached": 0,
2238      "context_window": 1000000,
2239      "default_max_tokens": 16384,
2240      "can_reason": true,
2241      "reasoning_levels": [
2242        "low",
2243        "medium",
2244        "high"
2245      ],
2246      "default_reasoning_effort": "medium",
2247      "supports_attachments": false,
2248      "options": {}
2249    },
2250    {
2251      "id": "qwen/qwen-vl-max",
2252      "name": "Qwen: Qwen VL Max",
2253      "cost_per_1m_in": 0.7999999999999999,
2254      "cost_per_1m_out": 3.1999999999999997,
2255      "cost_per_1m_in_cached": 0,
2256      "cost_per_1m_out_cached": 0,
2257      "context_window": 131072,
2258      "default_max_tokens": 4096,
2259      "can_reason": false,
2260      "supports_attachments": true,
2261      "options": {}
2262    },
2263    {
2264      "id": "qwen/qwen-max",
2265      "name": "Qwen: Qwen-Max ",
2266      "cost_per_1m_in": 1.5999999999999999,
2267      "cost_per_1m_out": 6.3999999999999995,
2268      "cost_per_1m_in_cached": 0,
2269      "cost_per_1m_out_cached": 0.64,
2270      "context_window": 32768,
2271      "default_max_tokens": 4096,
2272      "can_reason": false,
2273      "supports_attachments": false,
2274      "options": {}
2275    },
2276    {
2277      "id": "qwen/qwen-plus",
2278      "name": "Qwen: Qwen-Plus",
2279      "cost_per_1m_in": 0.39999999999999997,
2280      "cost_per_1m_out": 1.2,
2281      "cost_per_1m_in_cached": 0,
2282      "cost_per_1m_out_cached": 0.16,
2283      "context_window": 131072,
2284      "default_max_tokens": 4096,
2285      "can_reason": false,
2286      "supports_attachments": false,
2287      "options": {}
2288    },
2289    {
2290      "id": "qwen/qwen-turbo",
2291      "name": "Qwen: Qwen-Turbo",
2292      "cost_per_1m_in": 0.049999999999999996,
2293      "cost_per_1m_out": 0.19999999999999998,
2294      "cost_per_1m_in_cached": 0,
2295      "cost_per_1m_out_cached": 0.02,
2296      "context_window": 1000000,
2297      "default_max_tokens": 4096,
2298      "can_reason": false,
2299      "supports_attachments": false,
2300      "options": {}
2301    },
2302    {
2303      "id": "qwen/qwen3-14b",
2304      "name": "Qwen: Qwen3 14B",
2305      "cost_per_1m_in": 0.06,
2306      "cost_per_1m_out": 0.24,
2307      "cost_per_1m_in_cached": 0,
2308      "cost_per_1m_out_cached": 0,
2309      "context_window": 40960,
2310      "default_max_tokens": 20480,
2311      "can_reason": true,
2312      "reasoning_levels": [
2313        "low",
2314        "medium",
2315        "high"
2316      ],
2317      "default_reasoning_effort": "medium",
2318      "supports_attachments": false,
2319      "options": {}
2320    },
2321    {
2322      "id": "qwen/qwen3-235b-a22b",
2323      "name": "Qwen: Qwen3 235B A22B",
2324      "cost_per_1m_in": 0.22,
2325      "cost_per_1m_out": 0.88,
2326      "cost_per_1m_in_cached": 0,
2327      "cost_per_1m_out_cached": 0,
2328      "context_window": 131072,
2329      "default_max_tokens": 8192,
2330      "can_reason": true,
2331      "reasoning_levels": [
2332        "low",
2333        "medium",
2334        "high"
2335      ],
2336      "default_reasoning_effort": "medium",
2337      "supports_attachments": false,
2338      "options": {}
2339    },
2340    {
2341      "id": "qwen/qwen3-235b-a22b-2507",
2342      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2343      "cost_per_1m_in": 0.35,
2344      "cost_per_1m_out": 1.2,
2345      "cost_per_1m_in_cached": 0,
2346      "cost_per_1m_out_cached": 0,
2347      "context_window": 262144,
2348      "default_max_tokens": 131072,
2349      "can_reason": false,
2350      "supports_attachments": false,
2351      "options": {}
2352    },
2353    {
2354      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2355      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2356      "cost_per_1m_in": 0.11,
2357      "cost_per_1m_out": 0.6,
2358      "cost_per_1m_in_cached": 0,
2359      "cost_per_1m_out_cached": 0,
2360      "context_window": 262144,
2361      "default_max_tokens": 131072,
2362      "can_reason": true,
2363      "reasoning_levels": [
2364        "low",
2365        "medium",
2366        "high"
2367      ],
2368      "default_reasoning_effort": "medium",
2369      "supports_attachments": false,
2370      "options": {}
2371    },
2372    {
2373      "id": "qwen/qwen3-30b-a3b",
2374      "name": "Qwen: Qwen3 30B A3B",
2375      "cost_per_1m_in": 0.15,
2376      "cost_per_1m_out": 0.6,
2377      "cost_per_1m_in_cached": 0,
2378      "cost_per_1m_out_cached": 0,
2379      "context_window": 131072,
2380      "default_max_tokens": 4000,
2381      "can_reason": true,
2382      "reasoning_levels": [
2383        "low",
2384        "medium",
2385        "high"
2386      ],
2387      "default_reasoning_effort": "medium",
2388      "supports_attachments": false,
2389      "options": {}
2390    },
2391    {
2392      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2393      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2394      "cost_per_1m_in": 0.09999999999999999,
2395      "cost_per_1m_out": 0.3,
2396      "cost_per_1m_in_cached": 0,
2397      "cost_per_1m_out_cached": 0,
2398      "context_window": 262144,
2399      "default_max_tokens": 26214,
2400      "can_reason": false,
2401      "supports_attachments": false,
2402      "options": {}
2403    },
2404    {
2405      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2406      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2407      "cost_per_1m_in": 0.09999999999999999,
2408      "cost_per_1m_out": 0.3,
2409      "cost_per_1m_in_cached": 0,
2410      "cost_per_1m_out_cached": 0,
2411      "context_window": 262144,
2412      "default_max_tokens": 26214,
2413      "can_reason": true,
2414      "reasoning_levels": [
2415        "low",
2416        "medium",
2417        "high"
2418      ],
2419      "default_reasoning_effort": "medium",
2420      "supports_attachments": false,
2421      "options": {}
2422    },
2423    {
2424      "id": "qwen/qwen3-32b",
2425      "name": "Qwen: Qwen3 32B",
2426      "cost_per_1m_in": 0.15,
2427      "cost_per_1m_out": 0.5,
2428      "cost_per_1m_in_cached": 0,
2429      "cost_per_1m_out_cached": 0,
2430      "context_window": 131072,
2431      "default_max_tokens": 4000,
2432      "can_reason": true,
2433      "reasoning_levels": [
2434        "low",
2435        "medium",
2436        "high"
2437      ],
2438      "default_reasoning_effort": "medium",
2439      "supports_attachments": false,
2440      "options": {}
2441    },
2442    {
2443      "id": "qwen/qwen3-4b:free",
2444      "name": "Qwen: Qwen3 4B (free)",
2445      "cost_per_1m_in": 0,
2446      "cost_per_1m_out": 0,
2447      "cost_per_1m_in_cached": 0,
2448      "cost_per_1m_out_cached": 0,
2449      "context_window": 40960,
2450      "default_max_tokens": 4096,
2451      "can_reason": true,
2452      "reasoning_levels": [
2453        "low",
2454        "medium",
2455        "high"
2456      ],
2457      "default_reasoning_effort": "medium",
2458      "supports_attachments": false,
2459      "options": {}
2460    },
2461    {
2462      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2463      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2464      "cost_per_1m_in": 0.06,
2465      "cost_per_1m_out": 0.25,
2466      "cost_per_1m_in_cached": 0,
2467      "cost_per_1m_out_cached": 0,
2468      "context_window": 262144,
2469      "default_max_tokens": 131072,
2470      "can_reason": false,
2471      "supports_attachments": false,
2472      "options": {}
2473    },
2474    {
2475      "id": "qwen/qwen3-coder",
2476      "name": "Qwen: Qwen3 Coder 480B A35B",
2477      "cost_per_1m_in": 0.22,
2478      "cost_per_1m_out": 0.95,
2479      "cost_per_1m_in_cached": 0,
2480      "cost_per_1m_out_cached": 0,
2481      "context_window": 262144,
2482      "default_max_tokens": 131072,
2483      "can_reason": false,
2484      "supports_attachments": false,
2485      "options": {}
2486    },
2487    {
2488      "id": "qwen/qwen3-coder:exacto",
2489      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2490      "cost_per_1m_in": 0.38,
2491      "cost_per_1m_out": 1.53,
2492      "cost_per_1m_in_cached": 0,
2493      "cost_per_1m_out_cached": 0,
2494      "context_window": 262144,
2495      "default_max_tokens": 131072,
2496      "can_reason": true,
2497      "reasoning_levels": [
2498        "low",
2499        "medium",
2500        "high"
2501      ],
2502      "default_reasoning_effort": "medium",
2503      "supports_attachments": false,
2504      "options": {}
2505    },
2506    {
2507      "id": "qwen/qwen3-coder:free",
2508      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2509      "cost_per_1m_in": 0,
2510      "cost_per_1m_out": 0,
2511      "cost_per_1m_in_cached": 0,
2512      "cost_per_1m_out_cached": 0,
2513      "context_window": 262144,
2514      "default_max_tokens": 26214,
2515      "can_reason": false,
2516      "supports_attachments": false,
2517      "options": {}
2518    },
2519    {
2520      "id": "qwen/qwen3-coder-flash",
2521      "name": "Qwen: Qwen3 Coder Flash",
2522      "cost_per_1m_in": 0.3,
2523      "cost_per_1m_out": 1.5,
2524      "cost_per_1m_in_cached": 0,
2525      "cost_per_1m_out_cached": 0.08,
2526      "context_window": 128000,
2527      "default_max_tokens": 32768,
2528      "can_reason": false,
2529      "supports_attachments": false,
2530      "options": {}
2531    },
2532    {
2533      "id": "qwen/qwen3-coder-plus",
2534      "name": "Qwen: Qwen3 Coder Plus",
2535      "cost_per_1m_in": 1,
2536      "cost_per_1m_out": 5,
2537      "cost_per_1m_in_cached": 0,
2538      "cost_per_1m_out_cached": 0.09999999999999999,
2539      "context_window": 128000,
2540      "default_max_tokens": 32768,
2541      "can_reason": false,
2542      "supports_attachments": false,
2543      "options": {}
2544    },
2545    {
2546      "id": "qwen/qwen3-max",
2547      "name": "Qwen: Qwen3 Max",
2548      "cost_per_1m_in": 1.2,
2549      "cost_per_1m_out": 6,
2550      "cost_per_1m_in_cached": 0,
2551      "cost_per_1m_out_cached": 0.24,
2552      "context_window": 256000,
2553      "default_max_tokens": 16384,
2554      "can_reason": false,
2555      "supports_attachments": false,
2556      "options": {}
2557    },
2558    {
2559      "id": "qwen/qwen3-next-80b-a3b-instruct",
2560      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2561      "cost_per_1m_in": 0.15,
2562      "cost_per_1m_out": 1.5,
2563      "cost_per_1m_in_cached": 0,
2564      "cost_per_1m_out_cached": 0,
2565      "context_window": 262144,
2566      "default_max_tokens": 26214,
2567      "can_reason": false,
2568      "supports_attachments": false,
2569      "options": {}
2570    },
2571    {
2572      "id": "qwen/qwen3-next-80b-a3b-thinking",
2573      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2574      "cost_per_1m_in": 0.15,
2575      "cost_per_1m_out": 1.5,
2576      "cost_per_1m_in_cached": 0,
2577      "cost_per_1m_out_cached": 0,
2578      "context_window": 131072,
2579      "default_max_tokens": 16384,
2580      "can_reason": true,
2581      "reasoning_levels": [
2582        "low",
2583        "medium",
2584        "high"
2585      ],
2586      "default_reasoning_effort": "medium",
2587      "supports_attachments": false,
2588      "options": {}
2589    },
2590    {
2591      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2592      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2593      "cost_per_1m_in": 0.3,
2594      "cost_per_1m_out": 1.2,
2595      "cost_per_1m_in_cached": 0,
2596      "cost_per_1m_out_cached": 0,
2597      "context_window": 262144,
2598      "default_max_tokens": 131072,
2599      "can_reason": true,
2600      "reasoning_levels": [
2601        "low",
2602        "medium",
2603        "high"
2604      ],
2605      "default_reasoning_effort": "medium",
2606      "supports_attachments": true,
2607      "options": {}
2608    },
2609    {
2610      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2611      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2612      "cost_per_1m_in": 0.19999999999999998,
2613      "cost_per_1m_out": 1,
2614      "cost_per_1m_in_cached": 0,
2615      "cost_per_1m_out_cached": 0,
2616      "context_window": 131072,
2617      "default_max_tokens": 16384,
2618      "can_reason": true,
2619      "reasoning_levels": [
2620        "low",
2621        "medium",
2622        "high"
2623      ],
2624      "default_reasoning_effort": "medium",
2625      "supports_attachments": true,
2626      "options": {}
2627    },
2628    {
2629      "id": "qwen/qwen3-vl-8b-instruct",
2630      "name": "Qwen: Qwen3 VL 8B Instruct",
2631      "cost_per_1m_in": 0.08,
2632      "cost_per_1m_out": 0.5,
2633      "cost_per_1m_in_cached": 0,
2634      "cost_per_1m_out_cached": 0,
2635      "context_window": 131072,
2636      "default_max_tokens": 16384,
2637      "can_reason": false,
2638      "supports_attachments": true,
2639      "options": {}
2640    },
2641    {
2642      "id": "qwen/qwen3-vl-8b-thinking",
2643      "name": "Qwen: Qwen3 VL 8B Thinking",
2644      "cost_per_1m_in": 0.18,
2645      "cost_per_1m_out": 2.0999999999999996,
2646      "cost_per_1m_in_cached": 0,
2647      "cost_per_1m_out_cached": 0,
2648      "context_window": 256000,
2649      "default_max_tokens": 16384,
2650      "can_reason": true,
2651      "reasoning_levels": [
2652        "low",
2653        "medium",
2654        "high"
2655      ],
2656      "default_reasoning_effort": "medium",
2657      "supports_attachments": true,
2658      "options": {}
2659    },
2660    {
2661      "id": "stepfun-ai/step3",
2662      "name": "StepFun: Step3",
2663      "cost_per_1m_in": 0.5700000000000001,
2664      "cost_per_1m_out": 1.42,
2665      "cost_per_1m_in_cached": 0,
2666      "cost_per_1m_out_cached": 0,
2667      "context_window": 65536,
2668      "default_max_tokens": 32768,
2669      "can_reason": true,
2670      "reasoning_levels": [
2671        "low",
2672        "medium",
2673        "high"
2674      ],
2675      "default_reasoning_effort": "medium",
2676      "supports_attachments": true,
2677      "options": {}
2678    },
2679    {
2680      "id": "tngtech/deepseek-r1t2-chimera",
2681      "name": "TNG: DeepSeek R1T2 Chimera",
2682      "cost_per_1m_in": 0.3,
2683      "cost_per_1m_out": 1.2,
2684      "cost_per_1m_in_cached": 0,
2685      "cost_per_1m_out_cached": 0,
2686      "context_window": 163840,
2687      "default_max_tokens": 81920,
2688      "can_reason": true,
2689      "reasoning_levels": [
2690        "low",
2691        "medium",
2692        "high"
2693      ],
2694      "default_reasoning_effort": "medium",
2695      "supports_attachments": false,
2696      "options": {}
2697    },
2698    {
2699      "id": "thedrummer/rocinante-12b",
2700      "name": "TheDrummer: Rocinante 12B",
2701      "cost_per_1m_in": 0.16999999999999998,
2702      "cost_per_1m_out": 0.43,
2703      "cost_per_1m_in_cached": 0,
2704      "cost_per_1m_out_cached": 0,
2705      "context_window": 32768,
2706      "default_max_tokens": 3276,
2707      "can_reason": false,
2708      "supports_attachments": false,
2709      "options": {}
2710    },
2711    {
2712      "id": "thedrummer/unslopnemo-12b",
2713      "name": "TheDrummer: UnslopNemo 12B",
2714      "cost_per_1m_in": 0.39999999999999997,
2715      "cost_per_1m_out": 0.39999999999999997,
2716      "cost_per_1m_in_cached": 0,
2717      "cost_per_1m_out_cached": 0,
2718      "context_window": 32768,
2719      "default_max_tokens": 3276,
2720      "can_reason": false,
2721      "supports_attachments": false,
2722      "options": {}
2723    },
2724    {
2725      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2726      "name": "Tongyi DeepResearch 30B A3B",
2727      "cost_per_1m_in": 0.09,
2728      "cost_per_1m_out": 0.44999999999999996,
2729      "cost_per_1m_in_cached": 0,
2730      "cost_per_1m_out_cached": 0,
2731      "context_window": 131072,
2732      "default_max_tokens": 65536,
2733      "can_reason": true,
2734      "reasoning_levels": [
2735        "low",
2736        "medium",
2737        "high"
2738      ],
2739      "default_reasoning_effort": "medium",
2740      "supports_attachments": false,
2741      "options": {}
2742    },
2743    {
2744      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2745      "name": "Tongyi DeepResearch 30B A3B (free)",
2746      "cost_per_1m_in": 0,
2747      "cost_per_1m_out": 0,
2748      "cost_per_1m_in_cached": 0,
2749      "cost_per_1m_out_cached": 0,
2750      "context_window": 131072,
2751      "default_max_tokens": 65536,
2752      "can_reason": true,
2753      "reasoning_levels": [
2754        "low",
2755        "medium",
2756        "high"
2757      ],
2758      "default_reasoning_effort": "medium",
2759      "supports_attachments": false,
2760      "options": {}
2761    },
2762    {
2763      "id": "z-ai/glm-4-32b",
2764      "name": "Z.AI: GLM 4 32B ",
2765      "cost_per_1m_in": 0.09999999999999999,
2766      "cost_per_1m_out": 0.09999999999999999,
2767      "cost_per_1m_in_cached": 0,
2768      "cost_per_1m_out_cached": 0,
2769      "context_window": 128000,
2770      "default_max_tokens": 12800,
2771      "can_reason": false,
2772      "supports_attachments": false,
2773      "options": {}
2774    },
2775    {
2776      "id": "z-ai/glm-4.5",
2777      "name": "Z.AI: GLM 4.5",
2778      "cost_per_1m_in": 0.35,
2779      "cost_per_1m_out": 1.55,
2780      "cost_per_1m_in_cached": 0,
2781      "cost_per_1m_out_cached": 0,
2782      "context_window": 131072,
2783      "default_max_tokens": 65536,
2784      "can_reason": true,
2785      "reasoning_levels": [
2786        "low",
2787        "medium",
2788        "high"
2789      ],
2790      "default_reasoning_effort": "medium",
2791      "supports_attachments": false,
2792      "options": {}
2793    },
2794    {
2795      "id": "z-ai/glm-4.5-air",
2796      "name": "Z.AI: GLM 4.5 Air",
2797      "cost_per_1m_in": 0.13,
2798      "cost_per_1m_out": 0.85,
2799      "cost_per_1m_in_cached": 0,
2800      "cost_per_1m_out_cached": 0,
2801      "context_window": 131072,
2802      "default_max_tokens": 49152,
2803      "can_reason": true,
2804      "reasoning_levels": [
2805        "low",
2806        "medium",
2807        "high"
2808      ],
2809      "default_reasoning_effort": "medium",
2810      "supports_attachments": false,
2811      "options": {}
2812    },
2813    {
2814      "id": "z-ai/glm-4.5-air:free",
2815      "name": "Z.AI: GLM 4.5 Air (free)",
2816      "cost_per_1m_in": 0,
2817      "cost_per_1m_out": 0,
2818      "cost_per_1m_in_cached": 0,
2819      "cost_per_1m_out_cached": 0,
2820      "context_window": 131072,
2821      "default_max_tokens": 48000,
2822      "can_reason": true,
2823      "reasoning_levels": [
2824        "low",
2825        "medium",
2826        "high"
2827      ],
2828      "default_reasoning_effort": "medium",
2829      "supports_attachments": false,
2830      "options": {}
2831    },
2832    {
2833      "id": "z-ai/glm-4.5v",
2834      "name": "Z.AI: GLM 4.5V",
2835      "cost_per_1m_in": 0.6,
2836      "cost_per_1m_out": 1.7999999999999998,
2837      "cost_per_1m_in_cached": 0,
2838      "cost_per_1m_out_cached": 0.11,
2839      "context_window": 65536,
2840      "default_max_tokens": 8192,
2841      "can_reason": true,
2842      "reasoning_levels": [
2843        "low",
2844        "medium",
2845        "high"
2846      ],
2847      "default_reasoning_effort": "medium",
2848      "supports_attachments": true,
2849      "options": {}
2850    },
2851    {
2852      "id": "z-ai/glm-4.6",
2853      "name": "Z.AI: GLM 4.6",
2854      "cost_per_1m_in": 0.6,
2855      "cost_per_1m_out": 2.2,
2856      "cost_per_1m_in_cached": 0,
2857      "cost_per_1m_out_cached": 0.11,
2858      "context_window": 204800,
2859      "default_max_tokens": 65536,
2860      "can_reason": true,
2861      "reasoning_levels": [
2862        "low",
2863        "medium",
2864        "high"
2865      ],
2866      "default_reasoning_effort": "medium",
2867      "supports_attachments": false,
2868      "options": {}
2869    },
2870    {
2871      "id": "z-ai/glm-4.6:exacto",
2872      "name": "Z.AI: GLM 4.6 (exacto)",
2873      "cost_per_1m_in": 0.6,
2874      "cost_per_1m_out": 1.9,
2875      "cost_per_1m_in_cached": 0,
2876      "cost_per_1m_out_cached": 0,
2877      "context_window": 202752,
2878      "default_max_tokens": 20275,
2879      "can_reason": true,
2880      "reasoning_levels": [
2881        "low",
2882        "medium",
2883        "high"
2884      ],
2885      "default_reasoning_effort": "medium",
2886      "supports_attachments": false,
2887      "options": {}
2888    },
2889    {
2890      "id": "inclusionai/ling-1t",
2891      "name": "inclusionAI: Ling-1T",
2892      "cost_per_1m_in": 0.5700000000000001,
2893      "cost_per_1m_out": 2.2800000000000002,
2894      "cost_per_1m_in_cached": 0,
2895      "cost_per_1m_out_cached": 0,
2896      "context_window": 131072,
2897      "default_max_tokens": 65536,
2898      "can_reason": false,
2899      "supports_attachments": false,
2900      "options": {}
2901    },
2902    {
2903      "id": "inclusionai/ring-1t",
2904      "name": "inclusionAI: Ring 1T",
2905      "cost_per_1m_in": 0.5700000000000001,
2906      "cost_per_1m_out": 2.2800000000000002,
2907      "cost_per_1m_in_cached": 0,
2908      "cost_per_1m_out_cached": 0,
2909      "context_window": 131072,
2910      "default_max_tokens": 65536,
2911      "can_reason": true,
2912      "reasoning_levels": [
2913        "low",
2914        "medium",
2915        "high"
2916      ],
2917      "default_reasoning_effort": "medium",
2918      "supports_attachments": false,
2919      "options": {}
2920    },
2921    {
2922      "id": "x-ai/grok-3",
2923      "name": "xAI: Grok 3",
2924      "cost_per_1m_in": 3,
2925      "cost_per_1m_out": 15,
2926      "cost_per_1m_in_cached": 0,
2927      "cost_per_1m_out_cached": 0.75,
2928      "context_window": 131072,
2929      "default_max_tokens": 13107,
2930      "can_reason": false,
2931      "supports_attachments": false,
2932      "options": {}
2933    },
2934    {
2935      "id": "x-ai/grok-3-beta",
2936      "name": "xAI: Grok 3 Beta",
2937      "cost_per_1m_in": 3,
2938      "cost_per_1m_out": 15,
2939      "cost_per_1m_in_cached": 0,
2940      "cost_per_1m_out_cached": 0.75,
2941      "context_window": 131072,
2942      "default_max_tokens": 13107,
2943      "can_reason": false,
2944      "supports_attachments": false,
2945      "options": {}
2946    },
2947    {
2948      "id": "x-ai/grok-3-mini",
2949      "name": "xAI: Grok 3 Mini",
2950      "cost_per_1m_in": 0.3,
2951      "cost_per_1m_out": 0.5,
2952      "cost_per_1m_in_cached": 0,
2953      "cost_per_1m_out_cached": 0.075,
2954      "context_window": 131072,
2955      "default_max_tokens": 13107,
2956      "can_reason": true,
2957      "reasoning_levels": [
2958        "low",
2959        "medium",
2960        "high"
2961      ],
2962      "default_reasoning_effort": "medium",
2963      "supports_attachments": false,
2964      "options": {}
2965    },
2966    {
2967      "id": "x-ai/grok-3-mini-beta",
2968      "name": "xAI: Grok 3 Mini Beta",
2969      "cost_per_1m_in": 0.3,
2970      "cost_per_1m_out": 0.5,
2971      "cost_per_1m_in_cached": 0,
2972      "cost_per_1m_out_cached": 0.075,
2973      "context_window": 131072,
2974      "default_max_tokens": 13107,
2975      "can_reason": true,
2976      "reasoning_levels": [
2977        "low",
2978        "medium",
2979        "high"
2980      ],
2981      "default_reasoning_effort": "medium",
2982      "supports_attachments": false,
2983      "options": {}
2984    },
2985    {
2986      "id": "x-ai/grok-4",
2987      "name": "xAI: Grok 4",
2988      "cost_per_1m_in": 3,
2989      "cost_per_1m_out": 15,
2990      "cost_per_1m_in_cached": 0,
2991      "cost_per_1m_out_cached": 0.75,
2992      "context_window": 256000,
2993      "default_max_tokens": 25600,
2994      "can_reason": true,
2995      "reasoning_levels": [
2996        "low",
2997        "medium",
2998        "high"
2999      ],
3000      "default_reasoning_effort": "medium",
3001      "supports_attachments": true,
3002      "options": {}
3003    },
3004    {
3005      "id": "x-ai/grok-4-fast",
3006      "name": "xAI: Grok 4 Fast",
3007      "cost_per_1m_in": 0.19999999999999998,
3008      "cost_per_1m_out": 0.5,
3009      "cost_per_1m_in_cached": 0,
3010      "cost_per_1m_out_cached": 0.049999999999999996,
3011      "context_window": 2000000,
3012      "default_max_tokens": 15000,
3013      "can_reason": true,
3014      "reasoning_levels": [
3015        "low",
3016        "medium",
3017        "high"
3018      ],
3019      "default_reasoning_effort": "medium",
3020      "supports_attachments": true,
3021      "options": {}
3022    },
3023    {
3024      "id": "x-ai/grok-code-fast-1",
3025      "name": "xAI: Grok Code Fast 1",
3026      "cost_per_1m_in": 0.19999999999999998,
3027      "cost_per_1m_out": 1.5,
3028      "cost_per_1m_in_cached": 0,
3029      "cost_per_1m_out_cached": 0.02,
3030      "context_window": 256000,
3031      "default_max_tokens": 5000,
3032      "can_reason": true,
3033      "reasoning_levels": [
3034        "low",
3035        "medium",
3036        "high"
3037      ],
3038      "default_reasoning_effort": "medium",
3039      "supports_attachments": false,
3040      "options": {}
3041    }
3042  ],
3043  "default_headers": {
3044    "HTTP-Referer": "https://charm.land",
3045    "X-Title": "Crush"
3046  }
3047}