openrouter.json

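A minimal sketch (not part of the file) of how a client might consume this provider catalog: load the JSON, resolve the configured default model IDs, and estimate a request's cost from the per-1M-token prices. The field names come from the listing below; the file path and the helper names (load_provider, find_model, estimate_cost) are illustrative assumptions, not an API this project necessarily exposes.

import json

def load_provider(path="openrouter.json"):
    """Load the provider catalog from disk (path is an assumed location)."""
    with open(path, encoding="utf-8") as f:
        return json.load(f)

def find_model(provider, model_id):
    """Look up a model entry in the catalog by its OpenRouter id."""
    for model in provider["models"]:
        if model["id"] == model_id:
            return model
    raise KeyError(model_id)

def estimate_cost(model, tokens_in, tokens_out):
    """Estimate USD cost of one request from cost_per_1m_in / cost_per_1m_out."""
    return (tokens_in * model["cost_per_1m_in"]
            + tokens_out * model["cost_per_1m_out"]) / 1_000_000

if __name__ == "__main__":
    provider = load_provider()
    large = find_model(provider, provider["default_large_model_id"])
    # e.g. a 20k-token prompt with a 2k-token completion on the default large model
    print(large["name"], round(estimate_cost(large, 20_000, 2_000), 4))
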
   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false,
  21      "options": {}
  22    },
  23    {
  24      "id": "ai21/jamba-mini-1.7",
  25      "name": "AI21: Jamba Mini 1.7",
  26      "cost_per_1m_in": 0.2,
  27      "cost_per_1m_out": 0.4,
  28      "cost_per_1m_in_cached": 0,
  29      "cost_per_1m_out_cached": 0,
  30      "context_window": 256000,
  31      "default_max_tokens": 2048,
  32      "can_reason": false,
  33      "supports_attachments": false,
  34      "options": {}
  35    },
  36    {
  37      "id": "amazon/nova-lite-v1",
  38      "name": "Amazon: Nova Lite 1.0",
  39      "cost_per_1m_in": 0.06,
  40      "cost_per_1m_out": 0.24,
  41      "cost_per_1m_in_cached": 0,
  42      "cost_per_1m_out_cached": 0,
  43      "context_window": 300000,
  44      "default_max_tokens": 2560,
  45      "can_reason": false,
  46      "supports_attachments": true,
  47      "options": {}
  48    },
  49    {
  50      "id": "amazon/nova-micro-v1",
  51      "name": "Amazon: Nova Micro 1.0",
  52      "cost_per_1m_in": 0.035,
  53      "cost_per_1m_out": 0.14,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 128000,
  57      "default_max_tokens": 2560,
  58      "can_reason": false,
  59      "supports_attachments": false,
  60      "options": {}
  61    },
  62    {
  63      "id": "amazon/nova-pro-v1",
  64      "name": "Amazon: Nova Pro 1.0",
  65      "cost_per_1m_in": 0.8,
  66      "cost_per_1m_out": 3.2,
  67      "cost_per_1m_in_cached": 0,
  68      "cost_per_1m_out_cached": 0,
  69      "context_window": 300000,
  70      "default_max_tokens": 2560,
  71      "can_reason": false,
  72      "supports_attachments": true,
  73      "options": {}
  74    },
  75    {
  76      "id": "anthropic/claude-3-haiku",
  77      "name": "Anthropic: Claude 3 Haiku",
  78      "cost_per_1m_in": 0.25,
  79      "cost_per_1m_out": 1.25,
  80      "cost_per_1m_in_cached": 0.3,
  81      "cost_per_1m_out_cached": 0.03,
  82      "context_window": 200000,
  83      "default_max_tokens": 2048,
  84      "can_reason": false,
  85      "supports_attachments": true,
  86      "options": {}
  87    },
  88    {
  89      "id": "anthropic/claude-3-opus",
  90      "name": "Anthropic: Claude 3 Opus",
  91      "cost_per_1m_in": 15,
  92      "cost_per_1m_out": 75,
  93      "cost_per_1m_in_cached": 18.75,
  94      "cost_per_1m_out_cached": 1.5,
  95      "context_window": 200000,
  96      "default_max_tokens": 2048,
  97      "can_reason": false,
  98      "supports_attachments": true,
  99      "options": {}
 100    },
 101    {
 102      "id": "anthropic/claude-3.5-haiku",
 103      "name": "Anthropic: Claude 3.5 Haiku",
 104      "cost_per_1m_in": 0.8,
 105      "cost_per_1m_out": 4,
 106      "cost_per_1m_in_cached": 1,
 107      "cost_per_1m_out_cached": 0.08,
 108      "context_window": 200000,
 109      "default_max_tokens": 4096,
 110      "can_reason": false,
 111      "supports_attachments": true,
 112      "options": {}
 113    },
 114    {
 115      "id": "anthropic/claude-3.5-haiku-20241022",
 116      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
 117      "cost_per_1m_in": 0.8,
 118      "cost_per_1m_out": 4,
 119      "cost_per_1m_in_cached": 1,
 120      "cost_per_1m_out_cached": 0.08,
 121      "context_window": 200000,
 122      "default_max_tokens": 4096,
 123      "can_reason": false,
 124      "supports_attachments": true,
 125      "options": {}
 126    },
 127    {
 128      "id": "anthropic/claude-3.5-sonnet",
 129      "name": "Anthropic: Claude 3.5 Sonnet",
 130      "cost_per_1m_in": 3,
 131      "cost_per_1m_out": 15,
 132      "cost_per_1m_in_cached": 0,
 133      "cost_per_1m_out_cached": 0,
 134      "context_window": 200000,
 135      "default_max_tokens": 4096,
 136      "can_reason": false,
 137      "supports_attachments": true,
 138      "options": {}
 139    },
 140    {
 141      "id": "anthropic/claude-3.5-sonnet-20240620",
 142      "name": "Anthropic: Claude 3.5 Sonnet (2024-06-20)",
 143      "cost_per_1m_in": 3,
 144      "cost_per_1m_out": 15,
 145      "cost_per_1m_in_cached": 3.75,
 146      "cost_per_1m_out_cached": 0.3,
 147      "context_window": 200000,
 148      "default_max_tokens": 4096,
 149      "can_reason": false,
 150      "supports_attachments": true,
 151      "options": {}
 152    },
 153    {
 154      "id": "anthropic/claude-3.7-sonnet",
 155      "name": "Anthropic: Claude 3.7 Sonnet",
 156      "cost_per_1m_in": 3,
 157      "cost_per_1m_out": 15,
 158      "cost_per_1m_in_cached": 3.75,
 159      "cost_per_1m_out_cached": 0.3,
 160      "context_window": 200000,
 161      "default_max_tokens": 64000,
 162      "can_reason": true,
 163      "reasoning_levels": [
 164        "low",
 165        "medium",
 166        "high"
 167      ],
 168      "default_reasoning_effort": "medium",
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-3.7-sonnet:thinking",
 174      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 175      "cost_per_1m_in": 3,
 176      "cost_per_1m_out": 15,
 177      "cost_per_1m_in_cached": 3.75,
 178      "cost_per_1m_out_cached": 0.3,
 179      "context_window": 200000,
 180      "default_max_tokens": 32000,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "low",
 184        "medium",
 185        "high"
 186      ],
 187      "default_reasoning_effort": "medium",
 188      "supports_attachments": true,
 189      "options": {}
 190    },
 191    {
 192      "id": "anthropic/claude-haiku-4.5",
 193      "name": "Anthropic: Claude Haiku 4.5",
 194      "cost_per_1m_in": 1,
 195      "cost_per_1m_out": 5,
 196      "cost_per_1m_in_cached": 1.25,
 197      "cost_per_1m_out_cached": 0.1,
 198      "context_window": 200000,
 199      "default_max_tokens": 32000,
 200      "can_reason": true,
 201      "reasoning_levels": [
 202        "low",
 203        "medium",
 204        "high"
 205      ],
 206      "default_reasoning_effort": "medium",
 207      "supports_attachments": true,
 208      "options": {}
 209    },
 210    {
 211      "id": "anthropic/claude-opus-4",
 212      "name": "Anthropic: Claude Opus 4",
 213      "cost_per_1m_in": 15,
 214      "cost_per_1m_out": 75,
 215      "cost_per_1m_in_cached": 18.75,
 216      "cost_per_1m_out_cached": 1.5,
 217      "context_window": 200000,
 218      "default_max_tokens": 16000,
 219      "can_reason": true,
 220      "reasoning_levels": [
 221        "low",
 222        "medium",
 223        "high"
 224      ],
 225      "default_reasoning_effort": "medium",
 226      "supports_attachments": true,
 227      "options": {}
 228    },
 229    {
 230      "id": "anthropic/claude-opus-4.1",
 231      "name": "Anthropic: Claude Opus 4.1",
 232      "cost_per_1m_in": 15,
 233      "cost_per_1m_out": 75,
 234      "cost_per_1m_in_cached": 18.75,
 235      "cost_per_1m_out_cached": 1.5,
 236      "context_window": 200000,
 237      "default_max_tokens": 16000,
 238      "can_reason": true,
 239      "reasoning_levels": [
 240        "low",
 241        "medium",
 242        "high"
 243      ],
 244      "default_reasoning_effort": "medium",
 245      "supports_attachments": true,
 246      "options": {}
 247    },
 248    {
 249      "id": "anthropic/claude-sonnet-4",
 250      "name": "Anthropic: Claude Sonnet 4",
 251      "cost_per_1m_in": 3,
 252      "cost_per_1m_out": 15,
 253      "cost_per_1m_in_cached": 3.75,
 254      "cost_per_1m_out_cached": 0.3,
 255      "context_window": 1000000,
 256      "default_max_tokens": 32000,
 257      "can_reason": true,
 258      "reasoning_levels": [
 259        "low",
 260        "medium",
 261        "high"
 262      ],
 263      "default_reasoning_effort": "medium",
 264      "supports_attachments": true,
 265      "options": {}
 266    },
 267    {
 268      "id": "anthropic/claude-sonnet-4.5",
 269      "name": "Anthropic: Claude Sonnet 4.5",
 270      "cost_per_1m_in": 3,
 271      "cost_per_1m_out": 15,
 272      "cost_per_1m_in_cached": 0,
 273      "cost_per_1m_out_cached": 0,
 274      "context_window": 1000000,
 275      "default_max_tokens": 32000,
 276      "can_reason": true,
 277      "reasoning_levels": [
 278        "low",
 279        "medium",
 280        "high"
 281      ],
 282      "default_reasoning_effort": "medium",
 283      "supports_attachments": true,
 284      "options": {}
 285    },
 286    {
 287      "id": "arcee-ai/virtuoso-large",
 288      "name": "Arcee AI: Virtuoso Large",
 289      "cost_per_1m_in": 0.75,
 290      "cost_per_1m_out": 1.2,
 291      "cost_per_1m_in_cached": 0,
 292      "cost_per_1m_out_cached": 0,
 293      "context_window": 131072,
 294      "default_max_tokens": 32000,
 295      "can_reason": false,
 296      "supports_attachments": false,
 297      "options": {}
 298    },
 299    {
 300      "id": "baidu/ernie-4.5-21b-a3b",
 301      "name": "Baidu: ERNIE 4.5 21B A3B",
 302      "cost_per_1m_in": 0.07,
 303      "cost_per_1m_out": 0.28,
 304      "cost_per_1m_in_cached": 0,
 305      "cost_per_1m_out_cached": 0,
 306      "context_window": 120000,
 307      "default_max_tokens": 4000,
 308      "can_reason": false,
 309      "supports_attachments": false,
 310      "options": {}
 311    },
 312    {
 313      "id": "baidu/ernie-4.5-vl-28b-a3b",
 314      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 315      "cost_per_1m_in": 0.14,
 316      "cost_per_1m_out": 0.56,
 317      "cost_per_1m_in_cached": 0,
 318      "cost_per_1m_out_cached": 0,
 319      "context_window": 30000,
 320      "default_max_tokens": 4000,
 321      "can_reason": true,
 322      "reasoning_levels": [
 323        "low",
 324        "medium",
 325        "high"
 326      ],
 327      "default_reasoning_effort": "medium",
 328      "supports_attachments": true,
 329      "options": {}
 330    },
 331    {
 332      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
 333      "name": "Cogito V2 Preview Llama 109B",
 334      "cost_per_1m_in": 0.18,
 335      "cost_per_1m_out": 0.59,
 336      "cost_per_1m_in_cached": 0,
 337      "cost_per_1m_out_cached": 0,
 338      "context_window": 32767,
 339      "default_max_tokens": 3276,
 340      "can_reason": true,
 341      "reasoning_levels": [
 342        "low",
 343        "medium",
 344        "high"
 345      ],
 346      "default_reasoning_effort": "medium",
 347      "supports_attachments": true,
 348      "options": {}
 349    },
 350    {
 351      "id": "cohere/command-r-08-2024",
 352      "name": "Cohere: Command R (08-2024)",
 353      "cost_per_1m_in": 0.15,
 354      "cost_per_1m_out": 0.6,
 355      "cost_per_1m_in_cached": 0,
 356      "cost_per_1m_out_cached": 0,
 357      "context_window": 128000,
 358      "default_max_tokens": 2000,
 359      "can_reason": false,
 360      "supports_attachments": false,
 361      "options": {}
 362    },
 363    {
 364      "id": "cohere/command-r-plus-08-2024",
 365      "name": "Cohere: Command R+ (08-2024)",
 366      "cost_per_1m_in": 2.5,
 367      "cost_per_1m_out": 10,
 368      "cost_per_1m_in_cached": 0,
 369      "cost_per_1m_out_cached": 0,
 370      "context_window": 128000,
 371      "default_max_tokens": 2000,
 372      "can_reason": false,
 373      "supports_attachments": false,
 374      "options": {}
 375    },
 376    {
 377      "id": "deepcogito/cogito-v2-preview-llama-405b",
 378      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
 379      "cost_per_1m_in": 3.5,
 380      "cost_per_1m_out": 3.5,
 381      "cost_per_1m_in_cached": 0,
 382      "cost_per_1m_out_cached": 0,
 383      "context_window": 32768,
 384      "default_max_tokens": 3276,
 385      "can_reason": true,
 386      "reasoning_levels": [
 387        "low",
 388        "medium",
 389        "high"
 390      ],
 391      "default_reasoning_effort": "medium",
 392      "supports_attachments": false,
 393      "options": {}
 394    },
 395    {
 396      "id": "deepcogito/cogito-v2-preview-llama-70b",
 397      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
 398      "cost_per_1m_in": 0.88,
 399      "cost_per_1m_out": 0.88,
 400      "cost_per_1m_in_cached": 0,
 401      "cost_per_1m_out_cached": 0,
 402      "context_window": 32768,
 403      "default_max_tokens": 3276,
 404      "can_reason": true,
 405      "reasoning_levels": [
 406        "low",
 407        "medium",
 408        "high"
 409      ],
 410      "default_reasoning_effort": "medium",
 411      "supports_attachments": false,
 412      "options": {}
 413    },
 414    {
 415      "id": "deepseek/deepseek-chat",
 416      "name": "DeepSeek: DeepSeek V3",
 417      "cost_per_1m_in": 0.4,
 418      "cost_per_1m_out": 1.3,
 419      "cost_per_1m_in_cached": 0,
 420      "cost_per_1m_out_cached": 0,
 421      "context_window": 64000,
 422      "default_max_tokens": 8000,
 423      "can_reason": false,
 424      "supports_attachments": false,
 425      "options": {}
 426    },
 427    {
 428      "id": "deepseek/deepseek-chat-v3-0324",
 429      "name": "DeepSeek: DeepSeek V3 0324",
 430      "cost_per_1m_in": 0.5,
 431      "cost_per_1m_out": 1.5,
 432      "cost_per_1m_in_cached": 0,
 433      "cost_per_1m_out_cached": 0,
 434      "context_window": 163840,
 435      "default_max_tokens": 16384,
 436      "can_reason": false,
 437      "supports_attachments": false,
 438      "options": {}
 439    },
 440    {
 441      "id": "deepseek/deepseek-chat-v3-0324:free",
 442      "name": "DeepSeek: DeepSeek V3 0324 (free)",
 443      "cost_per_1m_in": 0,
 444      "cost_per_1m_out": 0,
 445      "cost_per_1m_in_cached": 0,
 446      "cost_per_1m_out_cached": 0,
 447      "context_window": 163840,
 448      "default_max_tokens": 16384,
 449      "can_reason": false,
 450      "supports_attachments": false,
 451      "options": {}
 452    },
 453    {
 454      "id": "deepseek/deepseek-chat-v3.1",
 455      "name": "DeepSeek: DeepSeek V3.1",
 456      "cost_per_1m_in": 0.27,
 457      "cost_per_1m_out": 1,
 458      "cost_per_1m_in_cached": 0,
 459      "cost_per_1m_out_cached": 0,
 460      "context_window": 163840,
 461      "default_max_tokens": 16384,
 462      "can_reason": true,
 463      "reasoning_levels": [
 464        "low",
 465        "medium",
 466        "high"
 467      ],
 468      "default_reasoning_effort": "medium",
 469      "supports_attachments": false,
 470      "options": {}
 471    },
 472    {
 473      "id": "deepseek/deepseek-v3.1-terminus",
 474      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 475      "cost_per_1m_in": 0.23,
 476      "cost_per_1m_out": 0.9,
 477      "cost_per_1m_in_cached": 0,
 478      "cost_per_1m_out_cached": 0,
 479      "context_window": 163840,
 480      "default_max_tokens": 81920,
 481      "can_reason": true,
 482      "reasoning_levels": [
 483        "low",
 484        "medium",
 485        "high"
 486      ],
 487      "default_reasoning_effort": "medium",
 488      "supports_attachments": false,
 489      "options": {}
 490    },
 491    {
 492      "id": "deepseek/deepseek-v3.1-terminus:exacto",
 493      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
 494      "cost_per_1m_in": 0.27,
 495      "cost_per_1m_out": 1,
 496      "cost_per_1m_in_cached": 0,
 497      "cost_per_1m_out_cached": 0,
 498      "context_window": 131072,
 499      "default_max_tokens": 32768,
 500      "can_reason": true,
 501      "reasoning_levels": [
 502        "low",
 503        "medium",
 504        "high"
 505      ],
 506      "default_reasoning_effort": "medium",
 507      "supports_attachments": false,
 508      "options": {}
 509    },
 510    {
 511      "id": "deepseek/deepseek-v3.2-exp",
 512      "name": "DeepSeek: DeepSeek V3.2 Exp",
 513      "cost_per_1m_in": 0.27,
 514      "cost_per_1m_out": 0.4,
 515      "cost_per_1m_in_cached": 0,
 516      "cost_per_1m_out_cached": 0,
 517      "context_window": 163840,
 518      "default_max_tokens": 16384,
 519      "can_reason": true,
 520      "reasoning_levels": [
 521        "low",
 522        "medium",
 523        "high"
 524      ],
 525      "default_reasoning_effort": "medium",
 526      "supports_attachments": false,
 527      "options": {}
 528    },
 529    {
 530      "id": "deepseek/deepseek-r1",
 531      "name": "DeepSeek: R1",
 532      "cost_per_1m_in": 0.7,
 533      "cost_per_1m_out": 2.4,
 534      "cost_per_1m_in_cached": 0,
 535      "cost_per_1m_out_cached": 0,
 536      "context_window": 163840,
 537      "default_max_tokens": 81920,
 538      "can_reason": true,
 539      "reasoning_levels": [
 540        "low",
 541        "medium",
 542        "high"
 543      ],
 544      "default_reasoning_effort": "medium",
 545      "supports_attachments": false,
 546      "options": {}
 547    },
 548    {
 549      "id": "deepseek/deepseek-r1-0528",
 550      "name": "DeepSeek: R1 0528",
 551      "cost_per_1m_in": 0.5,
 552      "cost_per_1m_out": 2.18,
 553      "cost_per_1m_in_cached": 0,
 554      "cost_per_1m_out_cached": 0,
 555      "context_window": 163840,
 556      "default_max_tokens": 81920,
 557      "can_reason": true,
 558      "reasoning_levels": [
 559        "low",
 560        "medium",
 561        "high"
 562      ],
 563      "default_reasoning_effort": "medium",
 564      "supports_attachments": false,
 565      "options": {}
 566    },
 567    {
 568      "id": "deepseek/deepseek-r1-distill-llama-70b",
 569      "name": "DeepSeek: R1 Distill Llama 70B",
 570      "cost_per_1m_in": 0.03,
 571      "cost_per_1m_out": 0.13,
 572      "cost_per_1m_in_cached": 0,
 573      "cost_per_1m_out_cached": 0,
 574      "context_window": 131072,
 575      "default_max_tokens": 65536,
 576      "can_reason": true,
 577      "reasoning_levels": [
 578        "low",
 579        "medium",
 580        "high"
 581      ],
 582      "default_reasoning_effort": "medium",
 583      "supports_attachments": false,
 584      "options": {}
 585    },
 586    {
 587      "id": "google/gemini-2.0-flash-001",
 588      "name": "Google: Gemini 2.0 Flash",
 589      "cost_per_1m_in": 0.1,
 590      "cost_per_1m_out": 0.4,
 591      "cost_per_1m_in_cached": 0.1833,
 592      "cost_per_1m_out_cached": 0.025,
 593      "context_window": 1048576,
 594      "default_max_tokens": 4096,
 595      "can_reason": false,
 596      "supports_attachments": true,
 597      "options": {}
 598    },
 599    {
 600      "id": "google/gemini-2.0-flash-exp:free",
 601      "name": "Google: Gemini 2.0 Flash Experimental (free)",
 602      "cost_per_1m_in": 0,
 603      "cost_per_1m_out": 0,
 604      "cost_per_1m_in_cached": 0,
 605      "cost_per_1m_out_cached": 0,
 606      "context_window": 1048576,
 607      "default_max_tokens": 4096,
 608      "can_reason": false,
 609      "supports_attachments": true,
 610      "options": {}
 611    },
 612    {
 613      "id": "google/gemini-2.0-flash-lite-001",
 614      "name": "Google: Gemini 2.0 Flash Lite",
 615      "cost_per_1m_in": 0.075,
 616      "cost_per_1m_out": 0.3,
 617      "cost_per_1m_in_cached": 0,
 618      "cost_per_1m_out_cached": 0,
 619      "context_window": 1048576,
 620      "default_max_tokens": 4096,
 621      "can_reason": false,
 622      "supports_attachments": true,
 623      "options": {}
 624    },
 625    {
 626      "id": "google/gemini-2.5-flash",
 627      "name": "Google: Gemini 2.5 Flash",
 628      "cost_per_1m_in": 0.3,
 629      "cost_per_1m_out": 2.5,
 630      "cost_per_1m_in_cached": 0.3833,
 631      "cost_per_1m_out_cached": 0.03,
 632      "context_window": 1048576,
 633      "default_max_tokens": 32767,
 634      "can_reason": true,
 635      "reasoning_levels": [
 636        "low",
 637        "medium",
 638        "high"
 639      ],
 640      "default_reasoning_effort": "medium",
 641      "supports_attachments": true,
 642      "options": {}
 643    },
 644    {
 645      "id": "google/gemini-2.5-flash-lite",
 646      "name": "Google: Gemini 2.5 Flash Lite",
 647      "cost_per_1m_in": 0.1,
 648      "cost_per_1m_out": 0.4,
 649      "cost_per_1m_in_cached": 0.1833,
 650      "cost_per_1m_out_cached": 0.01,
 651      "context_window": 1048576,
 652      "default_max_tokens": 32767,
 653      "can_reason": true,
 654      "reasoning_levels": [
 655        "low",
 656        "medium",
 657        "high"
 658      ],
 659      "default_reasoning_effort": "medium",
 660      "supports_attachments": true,
 661      "options": {}
 662    },
 663    {
 664      "id": "google/gemini-2.5-flash-lite-preview-06-17",
 665      "name": "Google: Gemini 2.5 Flash Lite Preview 06-17",
 666      "cost_per_1m_in": 0.1,
 667      "cost_per_1m_out": 0.4,
 668      "cost_per_1m_in_cached": 0.1833,
 669      "cost_per_1m_out_cached": 0.025,
 670      "context_window": 1048576,
 671      "default_max_tokens": 32767,
 672      "can_reason": true,
 673      "reasoning_levels": [
 674        "low",
 675        "medium",
 676        "high"
 677      ],
 678      "default_reasoning_effort": "medium",
 679      "supports_attachments": true,
 680      "options": {}
 681    },
 682    {
 683      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 684      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 685      "cost_per_1m_in": 0.1,
 686      "cost_per_1m_out": 0.4,
 687      "cost_per_1m_in_cached": 0,
 688      "cost_per_1m_out_cached": 0,
 689      "context_window": 1048576,
 690      "default_max_tokens": 32768,
 691      "can_reason": true,
 692      "reasoning_levels": [
 693        "low",
 694        "medium",
 695        "high"
 696      ],
 697      "default_reasoning_effort": "medium",
 698      "supports_attachments": true,
 699      "options": {}
 700    },
 701    {
 702      "id": "google/gemini-2.5-flash-preview-09-2025",
 703      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
 704      "cost_per_1m_in": 0.3,
 705      "cost_per_1m_out": 2.5,
 706      "cost_per_1m_in_cached": 0.3833,
 707      "cost_per_1m_out_cached": 0.075,
 708      "context_window": 1048576,
 709      "default_max_tokens": 32768,
 710      "can_reason": true,
 711      "reasoning_levels": [
 712        "low",
 713        "medium",
 714        "high"
 715      ],
 716      "default_reasoning_effort": "medium",
 717      "supports_attachments": true,
 718      "options": {}
 719    },
 720    {
 721      "id": "google/gemini-2.5-pro",
 722      "name": "Google: Gemini 2.5 Pro",
 723      "cost_per_1m_in": 1.25,
 724      "cost_per_1m_out": 10,
 725      "cost_per_1m_in_cached": 1.625,
 726      "cost_per_1m_out_cached": 0.125,
 727      "context_window": 1048576,
 728      "default_max_tokens": 32768,
 729      "can_reason": true,
 730      "reasoning_levels": [
 731        "low",
 732        "medium",
 733        "high"
 734      ],
 735      "default_reasoning_effort": "medium",
 736      "supports_attachments": true,
 737      "options": {}
 738    },
 739    {
 740      "id": "google/gemini-2.5-pro-preview-05-06",
 741      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 742      "cost_per_1m_in": 1.25,
 743      "cost_per_1m_out": 10,
 744      "cost_per_1m_in_cached": 1.625,
 745      "cost_per_1m_out_cached": 0.125,
 746      "context_window": 1048576,
 747      "default_max_tokens": 32768,
 748      "can_reason": true,
 749      "reasoning_levels": [
 750        "low",
 751        "medium",
 752        "high"
 753      ],
 754      "default_reasoning_effort": "medium",
 755      "supports_attachments": true,
 756      "options": {}
 757    },
 758    {
 759      "id": "google/gemini-2.5-pro-preview",
 760      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 761      "cost_per_1m_in": 1.25,
 762      "cost_per_1m_out": 10,
 763      "cost_per_1m_in_cached": 1.625,
 764      "cost_per_1m_out_cached": 0.125,
 765      "context_window": 1048576,
 766      "default_max_tokens": 32768,
 767      "can_reason": true,
 768      "reasoning_levels": [
 769        "low",
 770        "medium",
 771        "high"
 772      ],
 773      "default_reasoning_effort": "medium",
 774      "supports_attachments": true,
 775      "options": {}
 776    },
 777    {
 778      "id": "inception/mercury",
 779      "name": "Inception: Mercury",
 780      "cost_per_1m_in": 0.25,
 781      "cost_per_1m_out": 1,
 782      "cost_per_1m_in_cached": 0,
 783      "cost_per_1m_out_cached": 0,
 784      "context_window": 128000,
 785      "default_max_tokens": 8192,
 786      "can_reason": false,
 787      "supports_attachments": false,
 788      "options": {}
 789    },
 790    {
 791      "id": "inception/mercury-coder",
 792      "name": "Inception: Mercury Coder",
 793      "cost_per_1m_in": 0.25,
 794      "cost_per_1m_out": 1,
 795      "cost_per_1m_in_cached": 0,
 796      "cost_per_1m_out_cached": 0,
 797      "context_window": 128000,
 798      "default_max_tokens": 8192,
 799      "can_reason": false,
 800      "supports_attachments": false,
 801      "options": {}
 802    },
 803    {
 804      "id": "meituan/longcat-flash-chat:free",
 805      "name": "Meituan: LongCat Flash Chat (free)",
 806      "cost_per_1m_in": 0,
 807      "cost_per_1m_out": 0,
 808      "cost_per_1m_in_cached": 0,
 809      "cost_per_1m_out_cached": 0,
 810      "context_window": 131072,
 811      "default_max_tokens": 65536,
 812      "can_reason": false,
 813      "supports_attachments": false,
 814      "options": {}
 815    },
 816    {
 817      "id": "meta-llama/llama-3.1-405b-instruct",
 818      "name": "Meta: Llama 3.1 405B Instruct",
 819      "cost_per_1m_in": 1,
 820      "cost_per_1m_out": 3,
 821      "cost_per_1m_in_cached": 0,
 822      "cost_per_1m_out_cached": 0,
 823      "context_window": 131072,
 824      "default_max_tokens": 13107,
 825      "can_reason": false,
 826      "supports_attachments": false,
 827      "options": {}
 828    },
 829    {
 830      "id": "meta-llama/llama-3.1-70b-instruct",
 831      "name": "Meta: Llama 3.1 70B Instruct",
 832      "cost_per_1m_in": 0.4,
 833      "cost_per_1m_out": 0.4,
 834      "cost_per_1m_in_cached": 0,
 835      "cost_per_1m_out_cached": 0,
 836      "context_window": 131072,
 837      "default_max_tokens": 8192,
 838      "can_reason": false,
 839      "supports_attachments": false,
 840      "options": {}
 841    },
 842    {
 843      "id": "meta-llama/llama-3.3-70b-instruct",
 844      "name": "Meta: Llama 3.3 70B Instruct",
 845      "cost_per_1m_in": 0.13,
 846      "cost_per_1m_out": 0.38,
 847      "cost_per_1m_in_cached": 0,
 848      "cost_per_1m_out_cached": 0,
 849      "context_window": 131072,
 850      "default_max_tokens": 8192,
 851      "can_reason": false,
 852      "supports_attachments": false,
 853      "options": {}
 854    },
 855    {
 856      "id": "meta-llama/llama-3.3-70b-instruct:free",
 857      "name": "Meta: Llama 3.3 70B Instruct (free)",
 858      "cost_per_1m_in": 0,
 859      "cost_per_1m_out": 0,
 860      "cost_per_1m_in_cached": 0,
 861      "cost_per_1m_out_cached": 0,
 862      "context_window": 131072,
 863      "default_max_tokens": 13107,
 864      "can_reason": false,
 865      "supports_attachments": false,
 866      "options": {}
 867    },
 868    {
 869      "id": "meta-llama/llama-3.3-8b-instruct:free",
 870      "name": "Meta: Llama 3.3 8B Instruct (free)",
 871      "cost_per_1m_in": 0,
 872      "cost_per_1m_out": 0,
 873      "cost_per_1m_in_cached": 0,
 874      "cost_per_1m_out_cached": 0,
 875      "context_window": 128000,
 876      "default_max_tokens": 2014,
 877      "can_reason": false,
 878      "supports_attachments": false,
 879      "options": {}
 880    },
 881    {
 882      "id": "meta-llama/llama-4-maverick",
 883      "name": "Meta: Llama 4 Maverick",
 884      "cost_per_1m_in": 0.27,
 885      "cost_per_1m_out": 0.85,
 886      "cost_per_1m_in_cached": 0,
 887      "cost_per_1m_out_cached": 0,
 888      "context_window": 1048576,
 889      "default_max_tokens": 104857,
 890      "can_reason": false,
 891      "supports_attachments": true,
 892      "options": {}
 893    },
 894    {
 895      "id": "meta-llama/llama-4-maverick:free",
 896      "name": "Meta: Llama 4 Maverick (free)",
 897      "cost_per_1m_in": 0,
 898      "cost_per_1m_out": 0,
 899      "cost_per_1m_in_cached": 0,
 900      "cost_per_1m_out_cached": 0,
 901      "context_window": 128000,
 902      "default_max_tokens": 2014,
 903      "can_reason": false,
 904      "supports_attachments": true,
 905      "options": {}
 906    },
 907    {
 908      "id": "meta-llama/llama-4-scout",
 909      "name": "Meta: Llama 4 Scout",
 910      "cost_per_1m_in": 0.15,
 911      "cost_per_1m_out": 0.6,
 912      "cost_per_1m_in_cached": 0,
 913      "cost_per_1m_out_cached": 0,
 914      "context_window": 1048576,
 915      "default_max_tokens": 104857,
 916      "can_reason": false,
 917      "supports_attachments": true,
 918      "options": {}
 919    },
 920    {
 921      "id": "meta-llama/llama-4-scout:free",
 922      "name": "Meta: Llama 4 Scout (free)",
 923      "cost_per_1m_in": 0,
 924      "cost_per_1m_out": 0,
 925      "cost_per_1m_in_cached": 0,
 926      "cost_per_1m_out_cached": 0,
 927      "context_window": 128000,
 928      "default_max_tokens": 2014,
 929      "can_reason": false,
 930      "supports_attachments": true,
 931      "options": {}
 932    },
 933    {
 934      "id": "microsoft/phi-3-medium-128k-instruct",
 935      "name": "Microsoft: Phi-3 Medium 128K Instruct",
 936      "cost_per_1m_in": 1,
 937      "cost_per_1m_out": 1,
 938      "cost_per_1m_in_cached": 0,
 939      "cost_per_1m_out_cached": 0,
 940      "context_window": 128000,
 941      "default_max_tokens": 12800,
 942      "can_reason": false,
 943      "supports_attachments": false,
 944      "options": {}
 945    },
 946    {
 947      "id": "microsoft/phi-3-mini-128k-instruct",
 948      "name": "Microsoft: Phi-3 Mini 128K Instruct",
 949      "cost_per_1m_in": 0.1,
 950      "cost_per_1m_out": 0.1,
 951      "cost_per_1m_in_cached": 0,
 952      "cost_per_1m_out_cached": 0,
 953      "context_window": 128000,
 954      "default_max_tokens": 12800,
 955      "can_reason": false,
 956      "supports_attachments": false,
 957      "options": {}
 958    },
 959    {
 960      "id": "microsoft/phi-3.5-mini-128k-instruct",
 961      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
 962      "cost_per_1m_in": 0.1,
 963      "cost_per_1m_out": 0.1,
 964      "cost_per_1m_in_cached": 0,
 965      "cost_per_1m_out_cached": 0,
 966      "context_window": 128000,
 967      "default_max_tokens": 12800,
 968      "can_reason": false,
 969      "supports_attachments": false,
 970      "options": {}
 971    },
 972    {
 973      "id": "minimax/minimax-m2:free",
 974      "name": "MiniMax: MiniMax M2 (free)",
 975      "cost_per_1m_in": 0,
 976      "cost_per_1m_out": 0,
 977      "cost_per_1m_in_cached": 0,
 978      "cost_per_1m_out_cached": 0,
 979      "context_window": 204800,
 980      "default_max_tokens": 65536,
 981      "can_reason": true,
 982      "reasoning_levels": [
 983        "low",
 984        "medium",
 985        "high"
 986      ],
 987      "default_reasoning_effort": "medium",
 988      "supports_attachments": false,
 989      "options": {}
 990    },
 991    {
 992      "id": "mistralai/mistral-large",
 993      "name": "Mistral Large",
 994      "cost_per_1m_in": 2,
 995      "cost_per_1m_out": 6,
 996      "cost_per_1m_in_cached": 0,
 997      "cost_per_1m_out_cached": 0,
 998      "context_window": 128000,
 999      "default_max_tokens": 12800,
1000      "can_reason": false,
1001      "supports_attachments": false,
1002      "options": {}
1003    },
1004    {
1005      "id": "mistralai/mistral-large-2407",
1006      "name": "Mistral Large 2407",
1007      "cost_per_1m_in": 2,
1008      "cost_per_1m_out": 6,
1009      "cost_per_1m_in_cached": 0,
1010      "cost_per_1m_out_cached": 0,
1011      "context_window": 131072,
1012      "default_max_tokens": 13107,
1013      "can_reason": false,
1014      "supports_attachments": false,
1015      "options": {}
1016    },
1017    {
1018      "id": "mistralai/mistral-large-2411",
1019      "name": "Mistral Large 2411",
1020      "cost_per_1m_in": 2,
1021      "cost_per_1m_out": 6,
1022      "cost_per_1m_in_cached": 0,
1023      "cost_per_1m_out_cached": 0,
1024      "context_window": 131072,
1025      "default_max_tokens": 13107,
1026      "can_reason": false,
1027      "supports_attachments": false,
1028      "options": {}
1029    },
1030    {
1031      "id": "mistralai/mistral-small",
1032      "name": "Mistral Small",
1033      "cost_per_1m_in": 0.2,
1034      "cost_per_1m_out": 0.6,
1035      "cost_per_1m_in_cached": 0,
1036      "cost_per_1m_out_cached": 0,
1037      "context_window": 32768,
1038      "default_max_tokens": 3276,
1039      "can_reason": false,
1040      "supports_attachments": false,
1041      "options": {}
1042    },
1043    {
1044      "id": "mistralai/mistral-tiny",
1045      "name": "Mistral Tiny",
1046      "cost_per_1m_in": 0.25,
1047      "cost_per_1m_out": 0.25,
1048      "cost_per_1m_in_cached": 0,
1049      "cost_per_1m_out_cached": 0,
1050      "context_window": 32768,
1051      "default_max_tokens": 3276,
1052      "can_reason": false,
1053      "supports_attachments": false,
1054      "options": {}
1055    },
1056    {
1057      "id": "mistralai/codestral-2501",
1058      "name": "Mistral: Codestral 2501",
1059      "cost_per_1m_in": 0.3,
1060      "cost_per_1m_out": 0.9,
1061      "cost_per_1m_in_cached": 0,
1062      "cost_per_1m_out_cached": 0,
1063      "context_window": 262144,
1064      "default_max_tokens": 26214,
1065      "can_reason": false,
1066      "supports_attachments": false,
1067      "options": {}
1068    },
1069    {
1070      "id": "mistralai/codestral-2508",
1071      "name": "Mistral: Codestral 2508",
1072      "cost_per_1m_in": 0.3,
1073      "cost_per_1m_out": 0.9,
1074      "cost_per_1m_in_cached": 0,
1075      "cost_per_1m_out_cached": 0,
1076      "context_window": 256000,
1077      "default_max_tokens": 25600,
1078      "can_reason": false,
1079      "supports_attachments": false,
1080      "options": {}
1081    },
1082    {
1083      "id": "mistralai/devstral-medium",
1084      "name": "Mistral: Devstral Medium",
1085      "cost_per_1m_in": 0.4,
1086      "cost_per_1m_out": 2,
1087      "cost_per_1m_in_cached": 0,
1088      "cost_per_1m_out_cached": 0,
1089      "context_window": 131072,
1090      "default_max_tokens": 13107,
1091      "can_reason": false,
1092      "supports_attachments": false,
1093      "options": {}
1094    },
1095    {
1096      "id": "mistralai/devstral-small-2505",
1097      "name": "Mistral: Devstral Small 2505",
1098      "cost_per_1m_in": 0.05,
1099      "cost_per_1m_out": 0.22,
1100      "cost_per_1m_in_cached": 0,
1101      "cost_per_1m_out_cached": 0,
1102      "context_window": 131072,
1103      "default_max_tokens": 65536,
1104      "can_reason": false,
1105      "supports_attachments": false,
1106      "options": {}
1107    },
1108    {
1109      "id": "mistralai/devstral-small-2505:free",
1110      "name": "Mistral: Devstral Small 2505 (free)",
1111      "cost_per_1m_in": 0,
1112      "cost_per_1m_out": 0,
1113      "cost_per_1m_in_cached": 0,
1114      "cost_per_1m_out_cached": 0,
1115      "context_window": 32768,
1116      "default_max_tokens": 3276,
1117      "can_reason": false,
1118      "supports_attachments": false,
1119      "options": {}
1120    },
1121    {
1122      "id": "mistralai/magistral-medium-2506",
1123      "name": "Mistral: Magistral Medium 2506",
1124      "cost_per_1m_in": 2,
1125      "cost_per_1m_out": 5,
1126      "cost_per_1m_in_cached": 0,
1127      "cost_per_1m_out_cached": 0,
1128      "context_window": 40960,
1129      "default_max_tokens": 20000,
1130      "can_reason": true,
1131      "reasoning_levels": [
1132        "low",
1133        "medium",
1134        "high"
1135      ],
1136      "default_reasoning_effort": "medium",
1137      "supports_attachments": false,
1138      "options": {}
1139    },
1140    {
1141      "id": "mistralai/magistral-medium-2506:thinking",
1142      "name": "Mistral: Magistral Medium 2506 (thinking)",
1143      "cost_per_1m_in": 2,
1144      "cost_per_1m_out": 5,
1145      "cost_per_1m_in_cached": 0,
1146      "cost_per_1m_out_cached": 0,
1147      "context_window": 40960,
1148      "default_max_tokens": 20000,
1149      "can_reason": true,
1150      "reasoning_levels": [
1151        "low",
1152        "medium",
1153        "high"
1154      ],
1155      "default_reasoning_effort": "medium",
1156      "supports_attachments": false,
1157      "options": {}
1158    },
1159    {
1160      "id": "mistralai/magistral-small-2506",
1161      "name": "Mistral: Magistral Small 2506",
1162      "cost_per_1m_in": 0.5,
1163      "cost_per_1m_out": 1.5,
1164      "cost_per_1m_in_cached": 0,
1165      "cost_per_1m_out_cached": 0,
1166      "context_window": 40000,
1167      "default_max_tokens": 20000,
1168      "can_reason": true,
1169      "reasoning_levels": [
1170        "low",
1171        "medium",
1172        "high"
1173      ],
1174      "default_reasoning_effort": "medium",
1175      "supports_attachments": false,
1176      "options": {}
1177    },
1178    {
1179      "id": "mistralai/ministral-8b",
1180      "name": "Mistral: Ministral 8B",
1181      "cost_per_1m_in": 0.1,
1182      "cost_per_1m_out": 0.1,
1183      "cost_per_1m_in_cached": 0,
1184      "cost_per_1m_out_cached": 0,
1185      "context_window": 131072,
1186      "default_max_tokens": 13107,
1187      "can_reason": false,
1188      "supports_attachments": false,
1189      "options": {}
1190    },
1191    {
1192      "id": "mistralai/mistral-7b-instruct",
1193      "name": "Mistral: Mistral 7B Instruct",
1194      "cost_per_1m_in": 0.028,
1195      "cost_per_1m_out": 0.054,
1196      "cost_per_1m_in_cached": 0,
1197      "cost_per_1m_out_cached": 0,
1198      "context_window": 32768,
1199      "default_max_tokens": 8192,
1200      "can_reason": false,
1201      "supports_attachments": false,
1202      "options": {}
1203    },
1204    {
1205      "id": "mistralai/mistral-7b-instruct:free",
1206      "name": "Mistral: Mistral 7B Instruct (free)",
1207      "cost_per_1m_in": 0,
1208      "cost_per_1m_out": 0,
1209      "cost_per_1m_in_cached": 0,
1210      "cost_per_1m_out_cached": 0,
1211      "context_window": 32768,
1212      "default_max_tokens": 8192,
1213      "can_reason": false,
1214      "supports_attachments": false,
1215      "options": {}
1216    },
1217    {
1218      "id": "mistralai/mistral-7b-instruct-v0.3",
1219      "name": "Mistral: Mistral 7B Instruct v0.3",
1220      "cost_per_1m_in": 0.028,
1221      "cost_per_1m_out": 0.054,
1222      "cost_per_1m_in_cached": 0,
1223      "cost_per_1m_out_cached": 0,
1224      "context_window": 32768,
1225      "default_max_tokens": 8192,
1226      "can_reason": false,
1227      "supports_attachments": false,
1228      "options": {}
1229    },
1230    {
1231      "id": "mistralai/mistral-medium-3",
1232      "name": "Mistral: Mistral Medium 3",
1233      "cost_per_1m_in": 0.4,
1234      "cost_per_1m_out": 2,
1235      "cost_per_1m_in_cached": 0,
1236      "cost_per_1m_out_cached": 0,
1237      "context_window": 131072,
1238      "default_max_tokens": 13107,
1239      "can_reason": false,
1240      "supports_attachments": true,
1241      "options": {}
1242    },
1243    {
1244      "id": "mistralai/mistral-medium-3.1",
1245      "name": "Mistral: Mistral Medium 3.1",
1246      "cost_per_1m_in": 0.4,
1247      "cost_per_1m_out": 2,
1248      "cost_per_1m_in_cached": 0,
1249      "cost_per_1m_out_cached": 0,
1250      "context_window": 131072,
1251      "default_max_tokens": 13107,
1252      "can_reason": false,
1253      "supports_attachments": true,
1254      "options": {}
1255    },
1256    {
1257      "id": "mistralai/mistral-nemo",
1258      "name": "Mistral: Mistral Nemo",
1259      "cost_per_1m_in": 0.15,
1260      "cost_per_1m_out": 0.15,
1261      "cost_per_1m_in_cached": 0,
1262      "cost_per_1m_out_cached": 0,
1263      "context_window": 131072,
1264      "default_max_tokens": 13107,
1265      "can_reason": false,
1266      "supports_attachments": false,
1267      "options": {}
1268    },
1269    {
1270      "id": "mistralai/mistral-small-24b-instruct-2501",
1271      "name": "Mistral: Mistral Small 3",
1272      "cost_per_1m_in": 0.1,
1273      "cost_per_1m_out": 0.3,
1274      "cost_per_1m_in_cached": 0,
1275      "cost_per_1m_out_cached": 0,
1276      "context_window": 32768,
1277      "default_max_tokens": 3276,
1278      "can_reason": false,
1279      "supports_attachments": false,
1280      "options": {}
1281    },
1282    {
1283      "id": "mistralai/mistral-small-3.1-24b-instruct",
1284      "name": "Mistral: Mistral Small 3.1 24B",
1285      "cost_per_1m_in": 0.1,
1286      "cost_per_1m_out": 0.3,
1287      "cost_per_1m_in_cached": 0,
1288      "cost_per_1m_out_cached": 0,
1289      "context_window": 131072,
1290      "default_max_tokens": 13107,
1291      "can_reason": false,
1292      "supports_attachments": true,
1293      "options": {}
1294    },
1295    {
1296      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
1297      "name": "Mistral: Mistral Small 3.1 24B (free)",
1298      "cost_per_1m_in": 0,
1299      "cost_per_1m_out": 0,
1300      "cost_per_1m_in_cached": 0,
1301      "cost_per_1m_out_cached": 0,
1302      "context_window": 96000,
1303      "default_max_tokens": 48000,
1304      "can_reason": false,
1305      "supports_attachments": true,
1306      "options": {}
1307    },
1308    {
1309      "id": "mistralai/mistral-small-3.2-24b-instruct",
1310      "name": "Mistral: Mistral Small 3.2 24B",
1311      "cost_per_1m_in": 0.1,
1312      "cost_per_1m_out": 0.3,
1313      "cost_per_1m_in_cached": 0,
1314      "cost_per_1m_out_cached": 0,
1315      "context_window": 131072,
1316      "default_max_tokens": 13107,
1317      "can_reason": false,
1318      "supports_attachments": true,
1319      "options": {}
1320    },
1321    {
1322      "id": "mistralai/mistral-small-3.2-24b-instruct:free",
1323      "name": "Mistral: Mistral Small 3.2 24B (free)",
1324      "cost_per_1m_in": 0,
1325      "cost_per_1m_out": 0,
1326      "cost_per_1m_in_cached": 0,
1327      "cost_per_1m_out_cached": 0,
1328      "context_window": 131072,
1329      "default_max_tokens": 13107,
1330      "can_reason": false,
1331      "supports_attachments": true,
1332      "options": {}
1333    },
1334    {
1335      "id": "mistralai/mixtral-8x22b-instruct",
1336      "name": "Mistral: Mixtral 8x22B Instruct",
1337      "cost_per_1m_in": 2,
1338      "cost_per_1m_out": 6,
1339      "cost_per_1m_in_cached": 0,
1340      "cost_per_1m_out_cached": 0,
1341      "context_window": 65536,
1342      "default_max_tokens": 6553,
1343      "can_reason": false,
1344      "supports_attachments": false,
1345      "options": {}
1346    },
1347    {
1348      "id": "mistralai/mixtral-8x7b-instruct",
1349      "name": "Mistral: Mixtral 8x7B Instruct",
1350      "cost_per_1m_in": 0.54,
1351      "cost_per_1m_out": 0.54,
1352      "cost_per_1m_in_cached": 0,
1353      "cost_per_1m_out_cached": 0,
1354      "context_window": 32768,
1355      "default_max_tokens": 8192,
1356      "can_reason": false,
1357      "supports_attachments": false,
1358      "options": {}
1359    },
1360    {
1361      "id": "mistralai/pixtral-12b",
1362      "name": "Mistral: Pixtral 12B",
1363      "cost_per_1m_in": 0.15,
1364      "cost_per_1m_out": 0.15,
1365      "cost_per_1m_in_cached": 0,
1366      "cost_per_1m_out_cached": 0,
1367      "context_window": 131072,
1368      "default_max_tokens": 13107,
1369      "can_reason": false,
1370      "supports_attachments": true,
1371      "options": {}
1372    },
1373    {
1374      "id": "mistralai/pixtral-large-2411",
1375      "name": "Mistral: Pixtral Large 2411",
1376      "cost_per_1m_in": 2,
1377      "cost_per_1m_out": 6,
1378      "cost_per_1m_in_cached": 0,
1379      "cost_per_1m_out_cached": 0,
1380      "context_window": 131072,
1381      "default_max_tokens": 13107,
1382      "can_reason": false,
1383      "supports_attachments": true,
1384      "options": {}
1385    },
1386    {
1387      "id": "mistralai/mistral-saba",
1388      "name": "Mistral: Saba",
1389      "cost_per_1m_in": 0.2,
1390      "cost_per_1m_out": 0.6,
1391      "cost_per_1m_in_cached": 0,
1392      "cost_per_1m_out_cached": 0,
1393      "context_window": 32768,
1394      "default_max_tokens": 3276,
1395      "can_reason": false,
1396      "supports_attachments": false,
1397      "options": {}
1398    },
1399    {
1400      "id": "moonshotai/kimi-k2",
1401      "name": "MoonshotAI: Kimi K2 0711",
1402      "cost_per_1m_in": 0.5,
1403      "cost_per_1m_out": 2.4,
1404      "cost_per_1m_in_cached": 0,
1405      "cost_per_1m_out_cached": 0,
1406      "context_window": 131072,
1407      "default_max_tokens": 13107,
1408      "can_reason": false,
1409      "supports_attachments": false,
1410      "options": {}
1411    },
1412    {
1413      "id": "moonshotai/kimi-k2-0905",
1414      "name": "MoonshotAI: Kimi K2 0905",
1415      "cost_per_1m_in": 0.5,
1416      "cost_per_1m_out": 2,
1417      "cost_per_1m_in_cached": 0,
1418      "cost_per_1m_out_cached": 0,
1419      "context_window": 262144,
1420      "default_max_tokens": 26214,
1421      "can_reason": false,
1422      "supports_attachments": false,
1423      "options": {}
1424    },
1425    {
1426      "id": "moonshotai/kimi-k2-0905:exacto",
1427      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
1428      "cost_per_1m_in": 2.4,
1429      "cost_per_1m_out": 10,
1430      "cost_per_1m_in_cached": 0,
1431      "cost_per_1m_out_cached": 0,
1432      "context_window": 262144,
1433      "default_max_tokens": 26214,
1434      "can_reason": false,
1435      "supports_attachments": false,
1436      "options": {}
1437    },
1438    {
1439      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1440      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1441      "cost_per_1m_in": 0.6,
1442      "cost_per_1m_out": 0.6,
1443      "cost_per_1m_in_cached": 0,
1444      "cost_per_1m_out_cached": 0,
1445      "context_window": 131072,
1446      "default_max_tokens": 8192,
1447      "can_reason": false,
1448      "supports_attachments": false,
1449      "options": {}
1450    },
1451    {
1452      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1453      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1454      "cost_per_1m_in": 0.1,
1455      "cost_per_1m_out": 0.4,
1456      "cost_per_1m_in_cached": 0,
1457      "cost_per_1m_out_cached": 0,
1458      "context_window": 131072,
1459      "default_max_tokens": 13107,
1460      "can_reason": true,
1461      "reasoning_levels": [
1462        "low",
1463        "medium",
1464        "high"
1465      ],
1466      "default_reasoning_effort": "medium",
1467      "supports_attachments": false,
1468      "options": {}
1469    },
1470    {
1471      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1472      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1473      "cost_per_1m_in": 0,
1474      "cost_per_1m_out": 0,
1475      "cost_per_1m_in_cached": 0,
1476      "cost_per_1m_out_cached": 0,
1477      "context_window": 128000,
1478      "default_max_tokens": 64000,
1479      "can_reason": true,
1480      "reasoning_levels": [
1481        "low",
1482        "medium",
1483        "high"
1484      ],
1485      "default_reasoning_effort": "medium",
1486      "supports_attachments": true,
1487      "options": {}
1488    },
1489    {
1490      "id": "nvidia/nemotron-nano-9b-v2",
1491      "name": "NVIDIA: Nemotron Nano 9B V2",
1492      "cost_per_1m_in": 0.04,
1493      "cost_per_1m_out": 0.16,
1494      "cost_per_1m_in_cached": 0,
1495      "cost_per_1m_out_cached": 0,
1496      "context_window": 131072,
1497      "default_max_tokens": 13107,
1498      "can_reason": true,
1499      "reasoning_levels": [
1500        "low",
1501        "medium",
1502        "high"
1503      ],
1504      "default_reasoning_effort": "medium",
1505      "supports_attachments": false,
1506      "options": {}
1507    },
1508    {
1509      "id": "nvidia/nemotron-nano-9b-v2:free",
1510      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1511      "cost_per_1m_in": 0,
1512      "cost_per_1m_out": 0,
1513      "cost_per_1m_in_cached": 0,
1514      "cost_per_1m_out_cached": 0,
1515      "context_window": 128000,
1516      "default_max_tokens": 12800,
1517      "can_reason": true,
1518      "reasoning_levels": [
1519        "low",
1520        "medium",
1521        "high"
1522      ],
1523      "default_reasoning_effort": "medium",
1524      "supports_attachments": false,
1525      "options": {}
1526    },
1527    {
1528      "id": "nousresearch/deephermes-3-llama-3-8b-preview",
1529      "name": "Nous: DeepHermes 3 Llama 3 8B Preview",
1530      "cost_per_1m_in": 0.03,
1531      "cost_per_1m_out": 0.11,
1532      "cost_per_1m_in_cached": 0,
1533      "cost_per_1m_out_cached": 0,
1534      "context_window": 131072,
1535      "default_max_tokens": 65536,
1536      "can_reason": false,
1537      "supports_attachments": false,
1538      "options": {}
1539    },
1540    {
1541      "id": "nousresearch/deephermes-3-mistral-24b-preview",
1542      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
1543      "cost_per_1m_in": 0.15,
1544      "cost_per_1m_out": 0.59,
1545      "cost_per_1m_in_cached": 0,
1546      "cost_per_1m_out_cached": 0,
1547      "context_window": 32768,
1548      "default_max_tokens": 16384,
1549      "can_reason": true,
1550      "reasoning_levels": [
1551        "low",
1552        "medium",
1553        "high"
1554      ],
1555      "default_reasoning_effort": "medium",
1556      "supports_attachments": false,
1557      "options": {}
1558    },
1559    {
1560      "id": "nousresearch/hermes-3-llama-3.1-70b",
1561      "name": "Nous: Hermes 3 70B Instruct",
1562      "cost_per_1m_in": 0.4,
1563      "cost_per_1m_out": 0.4,
1564      "cost_per_1m_in_cached": 0,
1565      "cost_per_1m_out_cached": 0,
1566      "context_window": 12288,
1567      "default_max_tokens": 1228,
1568      "can_reason": false,
1569      "supports_attachments": false,
1570      "options": {}
1571    },
1572    {
1573      "id": "nousresearch/hermes-4-405b",
1574      "name": "Nous: Hermes 4 405B",
1575      "cost_per_1m_in": 0.3,
1576      "cost_per_1m_out": 1.2,
1577      "cost_per_1m_in_cached": 0,
1578      "cost_per_1m_out_cached": 0,
1579      "context_window": 131072,
1580      "default_max_tokens": 65536,
1581      "can_reason": true,
1582      "reasoning_levels": [
1583        "low",
1584        "medium",
1585        "high"
1586      ],
1587      "default_reasoning_effort": "medium",
1588      "supports_attachments": false,
1589      "options": {}
1590    },
1591    {
1592      "id": "openai/codex-mini",
1593      "name": "OpenAI: Codex Mini",
1594      "cost_per_1m_in": 1.5,
1595      "cost_per_1m_out": 6,
1596      "cost_per_1m_in_cached": 0,
1597      "cost_per_1m_out_cached": 0.375,
1598      "context_window": 200000,
1599      "default_max_tokens": 50000,
1600      "can_reason": true,
1601      "reasoning_levels": [
1602        "low",
1603        "medium",
1604        "high"
1605      ],
1606      "default_reasoning_effort": "medium",
1607      "supports_attachments": true,
1608      "options": {}
1609    },
1610    {
1611      "id": "openai/gpt-4-turbo",
1612      "name": "OpenAI: GPT-4 Turbo",
1613      "cost_per_1m_in": 10,
1614      "cost_per_1m_out": 30,
1615      "cost_per_1m_in_cached": 0,
1616      "cost_per_1m_out_cached": 0,
1617      "context_window": 128000,
1618      "default_max_tokens": 2048,
1619      "can_reason": false,
1620      "supports_attachments": true,
1621      "options": {}
1622    },
1623    {
1624      "id": "openai/gpt-4-1106-preview",
1625      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1626      "cost_per_1m_in": 10,
1627      "cost_per_1m_out": 30,
1628      "cost_per_1m_in_cached": 0,
1629      "cost_per_1m_out_cached": 0,
1630      "context_window": 128000,
1631      "default_max_tokens": 2048,
1632      "can_reason": false,
1633      "supports_attachments": false,
1634      "options": {}
1635    },
1636    {
1637      "id": "openai/gpt-4-turbo-preview",
1638      "name": "OpenAI: GPT-4 Turbo Preview",
1639      "cost_per_1m_in": 10,
1640      "cost_per_1m_out": 30,
1641      "cost_per_1m_in_cached": 0,
1642      "cost_per_1m_out_cached": 0,
1643      "context_window": 128000,
1644      "default_max_tokens": 2048,
1645      "can_reason": false,
1646      "supports_attachments": false,
1647      "options": {}
1648    },
1649    {
1650      "id": "openai/gpt-4.1",
1651      "name": "OpenAI: GPT-4.1",
1652      "cost_per_1m_in": 2,
1653      "cost_per_1m_out": 8,
1654      "cost_per_1m_in_cached": 0,
1655      "cost_per_1m_out_cached": 0.5,
1656      "context_window": 1047576,
1657      "default_max_tokens": 104757,
1658      "can_reason": false,
1659      "supports_attachments": true,
1660      "options": {}
1661    },
1662    {
1663      "id": "openai/gpt-4.1-mini",
1664      "name": "OpenAI: GPT-4.1 Mini",
1665      "cost_per_1m_in": 0.4,
1666      "cost_per_1m_out": 1.6,
1667      "cost_per_1m_in_cached": 0,
1668      "cost_per_1m_out_cached": 0.1,
1669      "context_window": 1047576,
1670      "default_max_tokens": 104757,
1671      "can_reason": false,
1672      "supports_attachments": true,
1673      "options": {}
1674    },
1675    {
1676      "id": "openai/gpt-4.1-nano",
1677      "name": "OpenAI: GPT-4.1 Nano",
1678      "cost_per_1m_in": 0.1,
1679      "cost_per_1m_out": 0.4,
1680      "cost_per_1m_in_cached": 0,
1681      "cost_per_1m_out_cached": 0.025,
1682      "context_window": 1047576,
1683      "default_max_tokens": 16384,
1684      "can_reason": false,
1685      "supports_attachments": true,
1686      "options": {}
1687    },
1688    {
1689      "id": "openai/gpt-4o",
1690      "name": "OpenAI: GPT-4o",
1691      "cost_per_1m_in": 2.5,
1692      "cost_per_1m_out": 10,
1693      "cost_per_1m_in_cached": 0,
1694      "cost_per_1m_out_cached": 0,
1695      "context_window": 128000,
1696      "default_max_tokens": 8192,
1697      "can_reason": false,
1698      "supports_attachments": true,
1699      "options": {}
1700    },
1701    {
1702      "id": "openai/gpt-4o-2024-05-13",
1703      "name": "OpenAI: GPT-4o (2024-05-13)",
1704      "cost_per_1m_in": 5,
1705      "cost_per_1m_out": 15,
1706      "cost_per_1m_in_cached": 0,
1707      "cost_per_1m_out_cached": 0,
1708      "context_window": 128000,
1709      "default_max_tokens": 2048,
1710      "can_reason": false,
1711      "supports_attachments": true,
1712      "options": {}
1713    },
1714    {
1715      "id": "openai/gpt-4o-2024-08-06",
1716      "name": "OpenAI: GPT-4o (2024-08-06)",
1717      "cost_per_1m_in": 2.5,
1718      "cost_per_1m_out": 10,
1719      "cost_per_1m_in_cached": 0,
1720      "cost_per_1m_out_cached": 1.25,
1721      "context_window": 128000,
1722      "default_max_tokens": 8192,
1723      "can_reason": false,
1724      "supports_attachments": true,
1725      "options": {}
1726    },
1727    {
1728      "id": "openai/gpt-4o-2024-11-20",
1729      "name": "OpenAI: GPT-4o (2024-11-20)",
1730      "cost_per_1m_in": 2.5,
1731      "cost_per_1m_out": 10,
1732      "cost_per_1m_in_cached": 0,
1733      "cost_per_1m_out_cached": 1.25,
1734      "context_window": 128000,
1735      "default_max_tokens": 8192,
1736      "can_reason": false,
1737      "supports_attachments": true,
1738      "options": {}
1739    },
1740    {
1741      "id": "openai/gpt-4o:extended",
1742      "name": "OpenAI: GPT-4o (extended)",
1743      "cost_per_1m_in": 6,
1744      "cost_per_1m_out": 18,
1745      "cost_per_1m_in_cached": 0,
1746      "cost_per_1m_out_cached": 0,
1747      "context_window": 128000,
1748      "default_max_tokens": 32000,
1749      "can_reason": false,
1750      "supports_attachments": true,
1751      "options": {}
1752    },
1753    {
1754      "id": "openai/gpt-4o-audio-preview",
1755      "name": "OpenAI: GPT-4o Audio",
1756      "cost_per_1m_in": 2.5,
1757      "cost_per_1m_out": 10,
1758      "cost_per_1m_in_cached": 0,
1759      "cost_per_1m_out_cached": 0,
1760      "context_window": 128000,
1761      "default_max_tokens": 8192,
1762      "can_reason": false,
1763      "supports_attachments": false,
1764      "options": {}
1765    },
1766    {
1767      "id": "openai/gpt-4o-mini",
1768      "name": "OpenAI: GPT-4o-mini",
1769      "cost_per_1m_in": 0.15,
1770      "cost_per_1m_out": 0.6,
1771      "cost_per_1m_in_cached": 0,
1772      "cost_per_1m_out_cached": 0.075,
1773      "context_window": 128000,
1774      "default_max_tokens": 8192,
1775      "can_reason": false,
1776      "supports_attachments": true,
1777      "options": {}
1778    },
1779    {
1780      "id": "openai/gpt-4o-mini-2024-07-18",
1781      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1782      "cost_per_1m_in": 0.15,
1783      "cost_per_1m_out": 0.6,
1784      "cost_per_1m_in_cached": 0,
1785      "cost_per_1m_out_cached": 0.075,
1786      "context_window": 128000,
1787      "default_max_tokens": 8192,
1788      "can_reason": false,
1789      "supports_attachments": true,
1790      "options": {}
1791    },
1792    {
1793      "id": "openai/gpt-5",
1794      "name": "OpenAI: GPT-5",
1795      "cost_per_1m_in": 1.25,
1796      "cost_per_1m_out": 10,
1797      "cost_per_1m_in_cached": 0,
1798      "cost_per_1m_out_cached": 0.125,
1799      "context_window": 400000,
1800      "default_max_tokens": 64000,
1801      "can_reason": true,
1802      "reasoning_levels": [
1803        "low",
1804        "medium",
1805        "high"
1806      ],
1807      "default_reasoning_effort": "medium",
1808      "supports_attachments": true,
1809      "options": {}
1810    },
1811    {
1812      "id": "openai/gpt-5-codex",
1813      "name": "OpenAI: GPT-5 Codex",
1814      "cost_per_1m_in": 1.25,
1815      "cost_per_1m_out": 10,
1816      "cost_per_1m_in_cached": 0,
1817      "cost_per_1m_out_cached": 0.125,
1818      "context_window": 400000,
1819      "default_max_tokens": 64000,
1820      "can_reason": true,
1821      "reasoning_levels": [
1822        "low",
1823        "medium",
1824        "high"
1825      ],
1826      "default_reasoning_effort": "medium",
1827      "supports_attachments": true,
1828      "options": {}
1829    },
1830    {
1831      "id": "openai/gpt-5-image",
1832      "name": "OpenAI: GPT-5 Image",
1833      "cost_per_1m_in": 10,
1834      "cost_per_1m_out": 10,
1835      "cost_per_1m_in_cached": 0,
1836      "cost_per_1m_out_cached": 1.25,
1837      "context_window": 400000,
1838      "default_max_tokens": 64000,
1839      "can_reason": true,
1840      "reasoning_levels": [
1841        "low",
1842        "medium",
1843        "high"
1844      ],
1845      "default_reasoning_effort": "medium",
1846      "supports_attachments": true,
1847      "options": {}
1848    },
1849    {
1850      "id": "openai/gpt-5-image-mini",
1851      "name": "OpenAI: GPT-5 Image Mini",
1852      "cost_per_1m_in": 2.5,
1853      "cost_per_1m_out": 2,
1854      "cost_per_1m_in_cached": 0,
1855      "cost_per_1m_out_cached": 0.25,
1856      "context_window": 400000,
1857      "default_max_tokens": 64000,
1858      "can_reason": true,
1859      "reasoning_levels": [
1860        "low",
1861        "medium",
1862        "high"
1863      ],
1864      "default_reasoning_effort": "medium",
1865      "supports_attachments": true,
1866      "options": {}
1867    },
1868    {
1869      "id": "openai/gpt-5-mini",
1870      "name": "OpenAI: GPT-5 Mini",
1871      "cost_per_1m_in": 0.25,
1872      "cost_per_1m_out": 2,
1873      "cost_per_1m_in_cached": 0,
1874      "cost_per_1m_out_cached": 0.03,
1875      "context_window": 400000,
1876      "default_max_tokens": 40000,
1877      "can_reason": true,
1878      "reasoning_levels": [
1879        "low",
1880        "medium",
1881        "high"
1882      ],
1883      "default_reasoning_effort": "medium",
1884      "supports_attachments": true,
1885      "options": {}
1886    },
1887    {
1888      "id": "openai/gpt-5-nano",
1889      "name": "OpenAI: GPT-5 Nano",
1890      "cost_per_1m_in": 0.05,
1891      "cost_per_1m_out": 0.4,
1892      "cost_per_1m_in_cached": 0,
1893      "cost_per_1m_out_cached": 0.005,
1894      "context_window": 400000,
1895      "default_max_tokens": 64000,
1896      "can_reason": true,
1897      "reasoning_levels": [
1898        "low",
1899        "medium",
1900        "high"
1901      ],
1902      "default_reasoning_effort": "medium",
1903      "supports_attachments": true,
1904      "options": {}
1905    },
1906    {
1907      "id": "openai/gpt-5-pro",
1908      "name": "OpenAI: GPT-5 Pro",
1909      "cost_per_1m_in": 15,
1910      "cost_per_1m_out": 120,
1911      "cost_per_1m_in_cached": 0,
1912      "cost_per_1m_out_cached": 0,
1913      "context_window": 400000,
1914      "default_max_tokens": 64000,
1915      "can_reason": true,
1916      "reasoning_levels": [
1917        "low",
1918        "medium",
1919        "high"
1920      ],
1921      "default_reasoning_effort": "medium",
1922      "supports_attachments": true,
1923      "options": {}
1924    },
1925    {
1926      "id": "openai/gpt-oss-120b",
1927      "name": "OpenAI: gpt-oss-120b",
1928      "cost_per_1m_in": 0.05,
1929      "cost_per_1m_out": 0.25,
1930      "cost_per_1m_in_cached": 0,
1931      "cost_per_1m_out_cached": 0,
1932      "context_window": 131072,
1933      "default_max_tokens": 16384,
1934      "can_reason": true,
1935      "reasoning_levels": [
1936        "low",
1937        "medium",
1938        "high"
1939      ],
1940      "default_reasoning_effort": "medium",
1941      "supports_attachments": false,
1942      "options": {}
1943    },
1944    {
1945      "id": "openai/gpt-oss-120b:exacto",
1946      "name": "OpenAI: gpt-oss-120b (exacto)",
1947      "cost_per_1m_in": 0.05,
1948      "cost_per_1m_out": 0.25,
1949      "cost_per_1m_in_cached": 0,
1950      "cost_per_1m_out_cached": 0,
1951      "context_window": 131072,
1952      "default_max_tokens": 16384,
1953      "can_reason": true,
1954      "reasoning_levels": [
1955        "low",
1956        "medium",
1957        "high"
1958      ],
1959      "default_reasoning_effort": "medium",
1960      "supports_attachments": false,
1961      "options": {}
1962    },
1963    {
1964      "id": "openai/gpt-oss-20b",
1965      "name": "OpenAI: gpt-oss-20b",
1966      "cost_per_1m_in": 0.04,
1967      "cost_per_1m_out": 0.15,
1968      "cost_per_1m_in_cached": 0,
1969      "cost_per_1m_out_cached": 0,
1970      "context_window": 131072,
1971      "default_max_tokens": 13107,
1972      "can_reason": true,
1973      "reasoning_levels": [
1974        "low",
1975        "medium",
1976        "high"
1977      ],
1978      "default_reasoning_effort": "medium",
1979      "supports_attachments": false,
1980      "options": {}
1981    },
1982    {
1983      "id": "openai/gpt-oss-20b:free",
1984      "name": "OpenAI: gpt-oss-20b (free)",
1985      "cost_per_1m_in": 0,
1986      "cost_per_1m_out": 0,
1987      "cost_per_1m_in_cached": 0,
1988      "cost_per_1m_out_cached": 0,
1989      "context_window": 131072,
1990      "default_max_tokens": 65536,
1991      "can_reason": true,
1992      "reasoning_levels": [
1993        "low",
1994        "medium",
1995        "high"
1996      ],
1997      "default_reasoning_effort": "medium",
1998      "supports_attachments": false,
1999      "options": {}
2000    },
2001    {
2002      "id": "openai/o1",
2003      "name": "OpenAI: o1",
2004      "cost_per_1m_in": 15,
2005      "cost_per_1m_out": 60,
2006      "cost_per_1m_in_cached": 0,
2007      "cost_per_1m_out_cached": 7.5,
2008      "context_window": 200000,
2009      "default_max_tokens": 50000,
2010      "can_reason": false,
2011      "supports_attachments": true,
2012      "options": {}
2013    },
2014    {
2015      "id": "openai/o3",
2016      "name": "OpenAI: o3",
2017      "cost_per_1m_in": 2,
2018      "cost_per_1m_out": 8,
2019      "cost_per_1m_in_cached": 0,
2020      "cost_per_1m_out_cached": 0.5,
2021      "context_window": 200000,
2022      "default_max_tokens": 50000,
2023      "can_reason": true,
2024      "reasoning_levels": [
2025        "low",
2026        "medium",
2027        "high"
2028      ],
2029      "default_reasoning_effort": "medium",
2030      "supports_attachments": true,
2031      "options": {}
2032    },
2033    {
2034      "id": "openai/o3-deep-research",
2035      "name": "OpenAI: o3 Deep Research",
2036      "cost_per_1m_in": 10,
2037      "cost_per_1m_out": 40,
2038      "cost_per_1m_in_cached": 0,
2039      "cost_per_1m_out_cached": 2.5,
2040      "context_window": 200000,
2041      "default_max_tokens": 50000,
2042      "can_reason": true,
2043      "reasoning_levels": [
2044        "low",
2045        "medium",
2046        "high"
2047      ],
2048      "default_reasoning_effort": "medium",
2049      "supports_attachments": true,
2050      "options": {}
2051    },
2052    {
2053      "id": "openai/o3-mini",
2054      "name": "OpenAI: o3 Mini",
2055      "cost_per_1m_in": 1.1,
2056      "cost_per_1m_out": 4.4,
2057      "cost_per_1m_in_cached": 0,
2058      "cost_per_1m_out_cached": 0.55,
2059      "context_window": 200000,
2060      "default_max_tokens": 50000,
2061      "can_reason": false,
2062      "supports_attachments": false,
2063      "options": {}
2064    },
2065    {
2066      "id": "openai/o3-mini-high",
2067      "name": "OpenAI: o3 Mini High",
2068      "cost_per_1m_in": 1.1,
2069      "cost_per_1m_out": 4.4,
2070      "cost_per_1m_in_cached": 0,
2071      "cost_per_1m_out_cached": 0.55,
2072      "context_window": 200000,
2073      "default_max_tokens": 50000,
2074      "can_reason": false,
2075      "supports_attachments": false,
2076      "options": {}
2077    },
2078    {
2079      "id": "openai/o3-pro",
2080      "name": "OpenAI: o3 Pro",
2081      "cost_per_1m_in": 20,
2082      "cost_per_1m_out": 80,
2083      "cost_per_1m_in_cached": 0,
2084      "cost_per_1m_out_cached": 0,
2085      "context_window": 200000,
2086      "default_max_tokens": 50000,
2087      "can_reason": true,
2088      "reasoning_levels": [
2089        "low",
2090        "medium",
2091        "high"
2092      ],
2093      "default_reasoning_effort": "medium",
2094      "supports_attachments": true,
2095      "options": {}
2096    },
2097    {
2098      "id": "openai/o4-mini",
2099      "name": "OpenAI: o4 Mini",
2100      "cost_per_1m_in": 1.1,
2101      "cost_per_1m_out": 4.4,
2102      "cost_per_1m_in_cached": 0,
2103      "cost_per_1m_out_cached": 0.275,
2104      "context_window": 200000,
2105      "default_max_tokens": 50000,
2106      "can_reason": true,
2107      "reasoning_levels": [
2108        "low",
2109        "medium",
2110        "high"
2111      ],
2112      "default_reasoning_effort": "medium",
2113      "supports_attachments": true,
2114      "options": {}
2115    },
2116    {
2117      "id": "openai/o4-mini-deep-research",
2118      "name": "OpenAI: o4 Mini Deep Research",
2119      "cost_per_1m_in": 2,
2120      "cost_per_1m_out": 8,
2121      "cost_per_1m_in_cached": 0,
2122      "cost_per_1m_out_cached": 0.5,
2123      "context_window": 200000,
2124      "default_max_tokens": 50000,
2125      "can_reason": true,
2126      "reasoning_levels": [
2127        "low",
2128        "medium",
2129        "high"
2130      ],
2131      "default_reasoning_effort": "medium",
2132      "supports_attachments": true,
2133      "options": {}
2134    },
2135    {
2136      "id": "openai/o4-mini-high",
2137      "name": "OpenAI: o4 Mini High",
2138      "cost_per_1m_in": 1.1,
2139      "cost_per_1m_out": 4.4,
2140      "cost_per_1m_in_cached": 0,
2141      "cost_per_1m_out_cached": 0.275,
2142      "context_window": 200000,
2143      "default_max_tokens": 50000,
2144      "can_reason": true,
2145      "reasoning_levels": [
2146        "low",
2147        "medium",
2148        "high"
2149      ],
2150      "default_reasoning_effort": "medium",
2151      "supports_attachments": true,
2152      "options": {}
2153    },
2154    {
2155      "id": "qwen/qwen-2.5-72b-instruct",
2156      "name": "Qwen2.5 72B Instruct",
2157      "cost_per_1m_in": 0.13,
2158      "cost_per_1m_out": 0.4,
2159      "cost_per_1m_in_cached": 0,
2160      "cost_per_1m_out_cached": 0,
2161      "context_window": 131072,
2162      "default_max_tokens": 13107,
2163      "can_reason": false,
2164      "supports_attachments": false,
2165      "options": {}
2166    },
2167    {
2168      "id": "qwen/qwq-32b",
2169      "name": "Qwen: QwQ 32B",
2170      "cost_per_1m_in": 0.15,
2171      "cost_per_1m_out": 0.58,
2172      "cost_per_1m_in_cached": 0,
2173      "cost_per_1m_out_cached": 0,
2174      "context_window": 131072,
2175      "default_max_tokens": 65536,
2176      "can_reason": true,
2177      "reasoning_levels": [
2178        "low",
2179        "medium",
2180        "high"
2181      ],
2182      "default_reasoning_effort": "medium",
2183      "supports_attachments": false,
2184      "options": {}
2185    },
2186    {
2187      "id": "qwen/qwen-plus-2025-07-28",
2188      "name": "Qwen: Qwen Plus 0728",
2189      "cost_per_1m_in": 0.4,
2190      "cost_per_1m_out": 1.2,
2191      "cost_per_1m_in_cached": 0,
2192      "cost_per_1m_out_cached": 0,
2193      "context_window": 1000000,
2194      "default_max_tokens": 16384,
2195      "can_reason": false,
2196      "supports_attachments": false,
2197      "options": {}
2198    },
2199    {
2200      "id": "qwen/qwen-plus-2025-07-28:thinking",
2201      "name": "Qwen: Qwen Plus 0728 (thinking)",
2202      "cost_per_1m_in": 0.4,
2203      "cost_per_1m_out": 4,
2204      "cost_per_1m_in_cached": 0,
2205      "cost_per_1m_out_cached": 0,
2206      "context_window": 1000000,
2207      "default_max_tokens": 16384,
2208      "can_reason": true,
2209      "reasoning_levels": [
2210        "low",
2211        "medium",
2212        "high"
2213      ],
2214      "default_reasoning_effort": "medium",
2215      "supports_attachments": false,
2216      "options": {}
2217    },
2218    {
2219      "id": "qwen/qwen-vl-max",
2220      "name": "Qwen: Qwen VL Max",
2221      "cost_per_1m_in": 0.8,
2222      "cost_per_1m_out": 3.2,
2223      "cost_per_1m_in_cached": 0,
2224      "cost_per_1m_out_cached": 0,
2225      "context_window": 131072,
2226      "default_max_tokens": 4096,
2227      "can_reason": false,
2228      "supports_attachments": true,
2229      "options": {}
2230    },
2231    {
2232      "id": "qwen/qwen-max",
2233      "name": "Qwen: Qwen-Max",
2234      "cost_per_1m_in": 1.6,
2235      "cost_per_1m_out": 6.4,
2236      "cost_per_1m_in_cached": 0,
2237      "cost_per_1m_out_cached": 0.64,
2238      "context_window": 32768,
2239      "default_max_tokens": 4096,
2240      "can_reason": false,
2241      "supports_attachments": false,
2242      "options": {}
2243    },
2244    {
2245      "id": "qwen/qwen-plus",
2246      "name": "Qwen: Qwen-Plus",
2247      "cost_per_1m_in": 0.4,
2248      "cost_per_1m_out": 1.2,
2249      "cost_per_1m_in_cached": 0,
2250      "cost_per_1m_out_cached": 0.16,
2251      "context_window": 131072,
2252      "default_max_tokens": 4096,
2253      "can_reason": false,
2254      "supports_attachments": false,
2255      "options": {}
2256    },
2257    {
2258      "id": "qwen/qwen-turbo",
2259      "name": "Qwen: Qwen-Turbo",
2260      "cost_per_1m_in": 0.05,
2261      "cost_per_1m_out": 0.2,
2262      "cost_per_1m_in_cached": 0,
2263      "cost_per_1m_out_cached": 0.02,
2264      "context_window": 1000000,
2265      "default_max_tokens": 4096,
2266      "can_reason": false,
2267      "supports_attachments": false,
2268      "options": {}
2269    },
2270    {
2271      "id": "qwen/qwen3-14b",
2272      "name": "Qwen: Qwen3 14B",
2273      "cost_per_1m_in": 0.06,
2274      "cost_per_1m_out": 0.24,
2275      "cost_per_1m_in_cached": 0,
2276      "cost_per_1m_out_cached": 0,
2277      "context_window": 40960,
2278      "default_max_tokens": 20480,
2279      "can_reason": true,
2280      "reasoning_levels": [
2281        "low",
2282        "medium",
2283        "high"
2284      ],
2285      "default_reasoning_effort": "medium",
2286      "supports_attachments": false,
2287      "options": {}
2288    },
2289    {
2290      "id": "qwen/qwen3-235b-a22b",
2291      "name": "Qwen: Qwen3 235B A22B",
2292      "cost_per_1m_in": 0.22,
2293      "cost_per_1m_out": 0.88,
2294      "cost_per_1m_in_cached": 0,
2295      "cost_per_1m_out_cached": 0,
2296      "context_window": 131072,
2297      "default_max_tokens": 8192,
2298      "can_reason": true,
2299      "reasoning_levels": [
2300        "low",
2301        "medium",
2302        "high"
2303      ],
2304      "default_reasoning_effort": "medium",
2305      "supports_attachments": false,
2306      "options": {}
2307    },
2308    {
2309      "id": "qwen/qwen3-235b-a22b:free",
2310      "name": "Qwen: Qwen3 235B A22B (free)",
2311      "cost_per_1m_in": 0,
2312      "cost_per_1m_out": 0,
2313      "cost_per_1m_in_cached": 0,
2314      "cost_per_1m_out_cached": 0,
2315      "context_window": 131072,
2316      "default_max_tokens": 13107,
2317      "can_reason": true,
2318      "reasoning_levels": [
2319        "low",
2320        "medium",
2321        "high"
2322      ],
2323      "default_reasoning_effort": "medium",
2324      "supports_attachments": false,
2325      "options": {}
2326    },
2327    {
2328      "id": "qwen/qwen3-235b-a22b-2507",
2329      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2330      "cost_per_1m_in": 0.1,
2331      "cost_per_1m_out": 0.1,
2332      "cost_per_1m_in_cached": 0,
2333      "cost_per_1m_out_cached": 0,
2334      "context_window": 262144,
2335      "default_max_tokens": 131072,
2336      "can_reason": false,
2337      "supports_attachments": false,
2338      "options": {}
2339    },
2340    {
2341      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2342      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2343      "cost_per_1m_in": 0.11,
2344      "cost_per_1m_out": 0.6,
2345      "cost_per_1m_in_cached": 0,
2346      "cost_per_1m_out_cached": 0,
2347      "context_window": 262144,
2348      "default_max_tokens": 131072,
2349      "can_reason": true,
2350      "reasoning_levels": [
2351        "low",
2352        "medium",
2353        "high"
2354      ],
2355      "default_reasoning_effort": "medium",
2356      "supports_attachments": false,
2357      "options": {}
2358    },
2359    {
2360      "id": "qwen/qwen3-30b-a3b",
2361      "name": "Qwen: Qwen3 30B A3B",
2362      "cost_per_1m_in": 0.09,
2363      "cost_per_1m_out": 0.45,
2364      "cost_per_1m_in_cached": 0,
2365      "cost_per_1m_out_cached": 0,
2366      "context_window": 131072,
2367      "default_max_tokens": 65536,
2368      "can_reason": true,
2369      "reasoning_levels": [
2370        "low",
2371        "medium",
2372        "high"
2373      ],
2374      "default_reasoning_effort": "medium",
2375      "supports_attachments": false,
2376      "options": {}
2377    },
2378    {
2379      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2380      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2381      "cost_per_1m_in": 0.09,
2382      "cost_per_1m_out": 0.3,
2383      "cost_per_1m_in_cached": 0,
2384      "cost_per_1m_out_cached": 0,
2385      "context_window": 262144,
2386      "default_max_tokens": 131072,
2387      "can_reason": false,
2388      "supports_attachments": false,
2389      "options": {}
2390    },
2391    {
2392      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2393      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2394      "cost_per_1m_in": 0.09,
2395      "cost_per_1m_out": 0.3,
2396      "cost_per_1m_in_cached": 0,
2397      "cost_per_1m_out_cached": 0,
2398      "context_window": 262144,
2399      "default_max_tokens": 65536,
2400      "can_reason": true,
2401      "reasoning_levels": [
2402        "low",
2403        "medium",
2404        "high"
2405      ],
2406      "default_reasoning_effort": "medium",
2407      "supports_attachments": false,
2408      "options": {}
2409    },
2410    {
2411      "id": "qwen/qwen3-32b",
2412      "name": "Qwen: Qwen3 32B",
2413      "cost_per_1m_in": 0.4,
2414      "cost_per_1m_out": 0.8,
2415      "cost_per_1m_in_cached": 0,
2416      "cost_per_1m_out_cached": 0,
2417      "context_window": 131072,
2418      "default_max_tokens": 16384,
2419      "can_reason": true,
2420      "reasoning_levels": [
2421        "low",
2422        "medium",
2423        "high"
2424      ],
2425      "default_reasoning_effort": "medium",
2426      "supports_attachments": false,
2427      "options": {}
2428    },
2429    {
2430      "id": "qwen/qwen3-4b:free",
2431      "name": "Qwen: Qwen3 4B (free)",
2432      "cost_per_1m_in": 0,
2433      "cost_per_1m_out": 0,
2434      "cost_per_1m_in_cached": 0,
2435      "cost_per_1m_out_cached": 0,
2436      "context_window": 40960,
2437      "default_max_tokens": 4096,
2438      "can_reason": true,
2439      "reasoning_levels": [
2440        "low",
2441        "medium",
2442        "high"
2443      ],
2444      "default_reasoning_effort": "medium",
2445      "supports_attachments": false,
2446      "options": {}
2447    },
2448    {
2449      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2450      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2451      "cost_per_1m_in": 0.06,
2452      "cost_per_1m_out": 0.25,
2453      "cost_per_1m_in_cached": 0,
2454      "cost_per_1m_out_cached": 0,
2455      "context_window": 262144,
2456      "default_max_tokens": 131072,
2457      "can_reason": false,
2458      "supports_attachments": false,
2459      "options": {}
2460    },
2461    {
2462      "id": "qwen/qwen3-coder",
2463      "name": "Qwen: Qwen3 Coder 480B A35B",
2464      "cost_per_1m_in": 1.5,
2465      "cost_per_1m_out": 7.5,
2466      "cost_per_1m_in_cached": 0,
2467      "cost_per_1m_out_cached": 0,
2468      "context_window": 262144,
2469      "default_max_tokens": 32768,
2470      "can_reason": false,
2471      "supports_attachments": false,
2472      "options": {}
2473    },
2474    {
2475      "id": "qwen/qwen3-coder:exacto",
2476      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2477      "cost_per_1m_in": 0.38,
2478      "cost_per_1m_out": 1.53,
2479      "cost_per_1m_in_cached": 0,
2480      "cost_per_1m_out_cached": 0,
2481      "context_window": 262144,
2482      "default_max_tokens": 131072,
2483      "can_reason": true,
2484      "reasoning_levels": [
2485        "low",
2486        "medium",
2487        "high"
2488      ],
2489      "default_reasoning_effort": "medium",
2490      "supports_attachments": false,
2491      "options": {}
2492    },
2493    {
2494      "id": "qwen/qwen3-coder:free",
2495      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2496      "cost_per_1m_in": 0,
2497      "cost_per_1m_out": 0,
2498      "cost_per_1m_in_cached": 0,
2499      "cost_per_1m_out_cached": 0,
2500      "context_window": 262144,
2501      "default_max_tokens": 26214,
2502      "can_reason": false,
2503      "supports_attachments": false,
2504      "options": {}
2505    },
2506    {
2507      "id": "qwen/qwen3-coder-flash",
2508      "name": "Qwen: Qwen3 Coder Flash",
2509      "cost_per_1m_in": 0.3,
2510      "cost_per_1m_out": 1.5,
2511      "cost_per_1m_in_cached": 0,
2512      "cost_per_1m_out_cached": 0.08,
2513      "context_window": 128000,
2514      "default_max_tokens": 32768,
2515      "can_reason": false,
2516      "supports_attachments": false,
2517      "options": {}
2518    },
2519    {
2520      "id": "qwen/qwen3-coder-plus",
2521      "name": "Qwen: Qwen3 Coder Plus",
2522      "cost_per_1m_in": 1,
2523      "cost_per_1m_out": 5,
2524      "cost_per_1m_in_cached": 0,
2525      "cost_per_1m_out_cached": 0.1,
2526      "context_window": 128000,
2527      "default_max_tokens": 32768,
2528      "can_reason": false,
2529      "supports_attachments": false,
2530      "options": {}
2531    },
2532    {
2533      "id": "qwen/qwen3-max",
2534      "name": "Qwen: Qwen3 Max",
2535      "cost_per_1m_in": 1.2,
2536      "cost_per_1m_out": 6,
2537      "cost_per_1m_in_cached": 0,
2538      "cost_per_1m_out_cached": 0.24,
2539      "context_window": 256000,
2540      "default_max_tokens": 16384,
2541      "can_reason": false,
2542      "supports_attachments": false,
2543      "options": {}
2544    },
2545    {
2546      "id": "qwen/qwen3-next-80b-a3b-instruct",
2547      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2548      "cost_per_1m_in": 0.15,
2549      "cost_per_1m_out": 1.5,
2550      "cost_per_1m_in_cached": 0,
2551      "cost_per_1m_out_cached": 0,
2552      "context_window": 262144,
2553      "default_max_tokens": 131072,
2554      "can_reason": false,
2555      "supports_attachments": false,
2556      "options": {}
2557    },
2558    {
2559      "id": "qwen/qwen3-next-80b-a3b-thinking",
2560      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2561      "cost_per_1m_in": 0.3,
2562      "cost_per_1m_out": 0.3,
2563      "cost_per_1m_in_cached": 0,
2564      "cost_per_1m_out_cached": 0,
2565      "context_window": 262144,
2566      "default_max_tokens": 131072,
2567      "can_reason": true,
2568      "reasoning_levels": [
2569        "low",
2570        "medium",
2571        "high"
2572      ],
2573      "default_reasoning_effort": "medium",
2574      "supports_attachments": false,
2575      "options": {}
2576    },
2577    {
2578      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2579      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2580      "cost_per_1m_in": 0.3,
2581      "cost_per_1m_out": 1.2,
2582      "cost_per_1m_in_cached": 0,
2583      "cost_per_1m_out_cached": 0,
2584      "context_window": 262144,
2585      "default_max_tokens": 131072,
2586      "can_reason": true,
2587      "reasoning_levels": [
2588        "low",
2589        "medium",
2590        "high"
2591      ],
2592      "default_reasoning_effort": "medium",
2593      "supports_attachments": true,
2594      "options": {}
2595    },
2596    {
2597      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2598      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2599      "cost_per_1m_in": 0.2,
2600      "cost_per_1m_out": 1,
2601      "cost_per_1m_in_cached": 0,
2602      "cost_per_1m_out_cached": 0,
2603      "context_window": 131072,
2604      "default_max_tokens": 16384,
2605      "can_reason": true,
2606      "reasoning_levels": [
2607        "low",
2608        "medium",
2609        "high"
2610      ],
2611      "default_reasoning_effort": "medium",
2612      "supports_attachments": true,
2613      "options": {}
2614    },
2615    {
2616      "id": "qwen/qwen3-vl-8b-instruct",
2617      "name": "Qwen: Qwen3 VL 8B Instruct",
2618      "cost_per_1m_in": 0.08,
2619      "cost_per_1m_out": 0.5,
2620      "cost_per_1m_in_cached": 0,
2621      "cost_per_1m_out_cached": 0,
2622      "context_window": 131072,
2623      "default_max_tokens": 16384,
2624      "can_reason": false,
2625      "supports_attachments": true,
2626      "options": {}
2627    },
2628    {
2629      "id": "qwen/qwen3-vl-8b-thinking",
2630      "name": "Qwen: Qwen3 VL 8B Thinking",
2631      "cost_per_1m_in": 0.18,
2632      "cost_per_1m_out": 2.1,
2633      "cost_per_1m_in_cached": 0,
2634      "cost_per_1m_out_cached": 0,
2635      "context_window": 256000,
2636      "default_max_tokens": 16384,
2637      "can_reason": true,
2638      "reasoning_levels": [
2639        "low",
2640        "medium",
2641        "high"
2642      ],
2643      "default_reasoning_effort": "medium",
2644      "supports_attachments": true,
2645      "options": {}
2646    },
2647    {
2648      "id": "stepfun-ai/step3",
2649      "name": "StepFun: Step3",
2650      "cost_per_1m_in": 0.57,
2651      "cost_per_1m_out": 1.42,
2652      "cost_per_1m_in_cached": 0,
2653      "cost_per_1m_out_cached": 0,
2654      "context_window": 65536,
2655      "default_max_tokens": 32768,
2656      "can_reason": true,
2657      "reasoning_levels": [
2658        "low",
2659        "medium",
2660        "high"
2661      ],
2662      "default_reasoning_effort": "medium",
2663      "supports_attachments": true,
2664      "options": {}
2665    },
2666    {
2667      "id": "tngtech/deepseek-r1t2-chimera",
2668      "name": "TNG: DeepSeek R1T2 Chimera",
2669      "cost_per_1m_in": 0.3,
2670      "cost_per_1m_out": 1.2,
2671      "cost_per_1m_in_cached": 0,
2672      "cost_per_1m_out_cached": 0,
2673      "context_window": 163840,
2674      "default_max_tokens": 81920,
2675      "can_reason": true,
2676      "reasoning_levels": [
2677        "low",
2678        "medium",
2679        "high"
2680      ],
2681      "default_reasoning_effort": "medium",
2682      "supports_attachments": false,
2683      "options": {}
2684    },
2685    {
2686      "id": "thedrummer/rocinante-12b",
2687      "name": "TheDrummer: Rocinante 12B",
2688      "cost_per_1m_in": 0.17,
2689      "cost_per_1m_out": 0.43,
2690      "cost_per_1m_in_cached": 0,
2691      "cost_per_1m_out_cached": 0,
2692      "context_window": 32768,
2693      "default_max_tokens": 3276,
2694      "can_reason": false,
2695      "supports_attachments": false,
2696      "options": {}
2697    },
2698    {
2699      "id": "thedrummer/unslopnemo-12b",
2700      "name": "TheDrummer: UnslopNemo 12B",
2701      "cost_per_1m_in": 0.4,
2702      "cost_per_1m_out": 0.4,
2703      "cost_per_1m_in_cached": 0,
2704      "cost_per_1m_out_cached": 0,
2705      "context_window": 32768,
2706      "default_max_tokens": 3276,
2707      "can_reason": false,
2708      "supports_attachments": false,
2709      "options": {}
2710    },
2711    {
2712      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2713      "name": "Tongyi DeepResearch 30B A3B",
2714      "cost_per_1m_in": 0.09,
2715      "cost_per_1m_out": 0.45,
2716      "cost_per_1m_in_cached": 0,
2717      "cost_per_1m_out_cached": 0,
2718      "context_window": 131072,
2719      "default_max_tokens": 65536,
2720      "can_reason": true,
2721      "reasoning_levels": [
2722        "low",
2723        "medium",
2724        "high"
2725      ],
2726      "default_reasoning_effort": "medium",
2727      "supports_attachments": false,
2728      "options": {}
2729    },
2730    {
2731      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2732      "name": "Tongyi DeepResearch 30B A3B (free)",
2733      "cost_per_1m_in": 0,
2734      "cost_per_1m_out": 0,
2735      "cost_per_1m_in_cached": 0,
2736      "cost_per_1m_out_cached": 0,
2737      "context_window": 131072,
2738      "default_max_tokens": 65536,
2739      "can_reason": true,
2740      "reasoning_levels": [
2741        "low",
2742        "medium",
2743        "high"
2744      ],
2745      "default_reasoning_effort": "medium",
2746      "supports_attachments": false,
2747      "options": {}
2748    },
2749    {
2750      "id": "z-ai/glm-4-32b",
2751      "name": "Z.AI: GLM 4 32B",
2752      "cost_per_1m_in": 0.1,
2753      "cost_per_1m_out": 0.1,
2754      "cost_per_1m_in_cached": 0,
2755      "cost_per_1m_out_cached": 0,
2756      "context_window": 128000,
2757      "default_max_tokens": 12800,
2758      "can_reason": false,
2759      "supports_attachments": false,
2760      "options": {}
2761    },
2762    {
2763      "id": "z-ai/glm-4.5",
2764      "name": "Z.AI: GLM 4.5",
2765      "cost_per_1m_in": 0.35,
2766      "cost_per_1m_out": 2,
2767      "cost_per_1m_in_cached": 0,
2768      "cost_per_1m_out_cached": 0,
2769      "context_window": 131072,
2770      "default_max_tokens": 65536,
2771      "can_reason": true,
2772      "reasoning_levels": [
2773        "low",
2774        "medium",
2775        "high"
2776      ],
2777      "default_reasoning_effort": "medium",
2778      "supports_attachments": false,
2779      "options": {}
2780    },
2781    {
2782      "id": "z-ai/glm-4.5-air",
2783      "name": "Z.AI: GLM 4.5 Air",
2784      "cost_per_1m_in": 0.13,
2785      "cost_per_1m_out": 0.85,
2786      "cost_per_1m_in_cached": 0,
2787      "cost_per_1m_out_cached": 0,
2788      "context_window": 131072,
2789      "default_max_tokens": 49152,
2790      "can_reason": true,
2791      "reasoning_levels": [
2792        "low",
2793        "medium",
2794        "high"
2795      ],
2796      "default_reasoning_effort": "medium",
2797      "supports_attachments": false,
2798      "options": {}
2799    },
2800    {
2801      "id": "z-ai/glm-4.5-air:free",
2802      "name": "Z.AI: GLM 4.5 Air (free)",
2803      "cost_per_1m_in": 0,
2804      "cost_per_1m_out": 0,
2805      "cost_per_1m_in_cached": 0,
2806      "cost_per_1m_out_cached": 0,
2807      "context_window": 131072,
2808      "default_max_tokens": 48000,
2809      "can_reason": true,
2810      "reasoning_levels": [
2811        "low",
2812        "medium",
2813        "high"
2814      ],
2815      "default_reasoning_effort": "medium",
2816      "supports_attachments": false,
2817      "options": {}
2818    },
2819    {
2820      "id": "z-ai/glm-4.5v",
2821      "name": "Z.AI: GLM 4.5V",
2822      "cost_per_1m_in": 0.6,
2823      "cost_per_1m_out": 1.8,
2824      "cost_per_1m_in_cached": 0,
2825      "cost_per_1m_out_cached": 0.11,
2826      "context_window": 65536,
2827      "default_max_tokens": 8192,
2828      "can_reason": true,
2829      "reasoning_levels": [
2830        "low",
2831        "medium",
2832        "high"
2833      ],
2834      "default_reasoning_effort": "medium",
2835      "supports_attachments": true,
2836      "options": {}
2837    },
2838    {
2839      "id": "z-ai/glm-4.6",
2840      "name": "Z.AI: GLM 4.6",
2841      "cost_per_1m_in": 0.5,
2842      "cost_per_1m_out": 1.9,
2843      "cost_per_1m_in_cached": 0,
2844      "cost_per_1m_out_cached": 0,
2845      "context_window": 204800,
2846      "default_max_tokens": 102400,
2847      "can_reason": true,
2848      "reasoning_levels": [
2849        "low",
2850        "medium",
2851        "high"
2852      ],
2853      "default_reasoning_effort": "medium",
2854      "supports_attachments": false,
2855      "options": {}
2856    },
2857    {
2858      "id": "z-ai/glm-4.6:exacto",
2859      "name": "Z.AI: GLM 4.6 (exacto)",
2860      "cost_per_1m_in": 0.6,
2861      "cost_per_1m_out": 1.9,
2862      "cost_per_1m_in_cached": 0,
2863      "cost_per_1m_out_cached": 0,
2864      "context_window": 202752,
2865      "default_max_tokens": 20275,
2866      "can_reason": true,
2867      "reasoning_levels": [
2868        "low",
2869        "medium",
2870        "high"
2871      ],
2872      "default_reasoning_effort": "medium",
2873      "supports_attachments": false,
2874      "options": {}
2875    },
2876    {
2877      "id": "inclusionai/ling-1t",
2878      "name": "inclusionAI: Ling-1T",
2879      "cost_per_1m_in": 0.57,
2880      "cost_per_1m_out": 2.28,
2881      "cost_per_1m_in_cached": 0,
2882      "cost_per_1m_out_cached": 0,
2883      "context_window": 131072,
2884      "default_max_tokens": 65536,
2885      "can_reason": false,
2886      "supports_attachments": false,
2887      "options": {}
2888    },
2889    {
2890      "id": "inclusionai/ring-1t",
2891      "name": "inclusionAI: Ring 1T",
2892      "cost_per_1m_in": 0.57,
2893      "cost_per_1m_out": 2.28,
2894      "cost_per_1m_in_cached": 0,
2895      "cost_per_1m_out_cached": 0,
2896      "context_window": 131072,
2897      "default_max_tokens": 65536,
2898      "can_reason": true,
2899      "reasoning_levels": [
2900        "low",
2901        "medium",
2902        "high"
2903      ],
2904      "default_reasoning_effort": "medium",
2905      "supports_attachments": false,
2906      "options": {}
2907    },
2908    {
2909      "id": "x-ai/grok-3",
2910      "name": "xAI: Grok 3",
2911      "cost_per_1m_in": 3,
2912      "cost_per_1m_out": 15,
2913      "cost_per_1m_in_cached": 0,
2914      "cost_per_1m_out_cached": 0.75,
2915      "context_window": 131072,
2916      "default_max_tokens": 13107,
2917      "can_reason": false,
2918      "supports_attachments": false,
2919      "options": {}
2920    },
2921    {
2922      "id": "x-ai/grok-3-beta",
2923      "name": "xAI: Grok 3 Beta",
2924      "cost_per_1m_in": 3,
2925      "cost_per_1m_out": 15,
2926      "cost_per_1m_in_cached": 0,
2927      "cost_per_1m_out_cached": 0.75,
2928      "context_window": 131072,
2929      "default_max_tokens": 13107,
2930      "can_reason": false,
2931      "supports_attachments": false,
2932      "options": {}
2933    },
2934    {
2935      "id": "x-ai/grok-3-mini",
2936      "name": "xAI: Grok 3 Mini",
2937      "cost_per_1m_in": 0.3,
2938      "cost_per_1m_out": 0.5,
2939      "cost_per_1m_in_cached": 0,
2940      "cost_per_1m_out_cached": 0.075,
2941      "context_window": 131072,
2942      "default_max_tokens": 13107,
2943      "can_reason": true,
2944      "reasoning_levels": [
2945        "low",
2946        "medium",
2947        "high"
2948      ],
2949      "default_reasoning_effort": "medium",
2950      "supports_attachments": false,
2951      "options": {}
2952    },
2953    {
2954      "id": "x-ai/grok-3-mini-beta",
2955      "name": "xAI: Grok 3 Mini Beta",
2956      "cost_per_1m_in": 0.3,
2957      "cost_per_1m_out": 0.5,
2958      "cost_per_1m_in_cached": 0,
2959      "cost_per_1m_out_cached": 0.075,
2960      "context_window": 131072,
2961      "default_max_tokens": 13107,
2962      "can_reason": true,
2963      "reasoning_levels": [
2964        "low",
2965        "medium",
2966        "high"
2967      ],
2968      "default_reasoning_effort": "medium",
2969      "supports_attachments": false,
2970      "options": {}
2971    },
2972    {
2973      "id": "x-ai/grok-4",
2974      "name": "xAI: Grok 4",
2975      "cost_per_1m_in": 3,
2976      "cost_per_1m_out": 15,
2977      "cost_per_1m_in_cached": 0,
2978      "cost_per_1m_out_cached": 0.75,
2979      "context_window": 256000,
2980      "default_max_tokens": 25600,
2981      "can_reason": true,
2982      "reasoning_levels": [
2983        "low",
2984        "medium",
2985        "high"
2986      ],
2987      "default_reasoning_effort": "medium",
2988      "supports_attachments": true,
2989      "options": {}
2990    },
2991    {
2992      "id": "x-ai/grok-4-fast",
2993      "name": "xAI: Grok 4 Fast",
2994      "cost_per_1m_in": 0.2,
2995      "cost_per_1m_out": 0.5,
2996      "cost_per_1m_in_cached": 0,
2997      "cost_per_1m_out_cached": 0.05,
2998      "context_window": 2000000,
2999      "default_max_tokens": 15000,
3000      "can_reason": true,
3001      "reasoning_levels": [
3002        "low",
3003        "medium",
3004        "high"
3005      ],
3006      "default_reasoning_effort": "medium",
3007      "supports_attachments": true,
3008      "options": {}
3009    },
3010    {
3011      "id": "x-ai/grok-code-fast-1",
3012      "name": "xAI: Grok Code Fast 1",
3013      "cost_per_1m_in": 0.2,
3014      "cost_per_1m_out": 1.5,
3015      "cost_per_1m_in_cached": 0,
3016      "cost_per_1m_out_cached": 0.02,
3017      "context_window": 256000,
3018      "default_max_tokens": 5000,
3019      "can_reason": true,
3020      "reasoning_levels": [
3021        "low",
3022        "medium",
3023        "high"
3024      ],
3025      "default_reasoning_effort": "medium",
3026      "supports_attachments": false,
3027      "options": {}
3028    }
3029  ],
3030  "default_headers": {
3031    "HTTP-Referer": "https://charm.land",
3032    "X-Title": "Crush"
3033  }
3034}