openrouter.json

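Usage note: the short Python sketch below is not part of the config file; it shows how a client might consume this provider definition, assuming openrouter.json sits in the working directory, OPENROUTER_API_KEY is set in the environment, the requests package is installed, and the endpoint exposes OpenRouter's OpenAI-compatible /chat/completions route. All variable names are illustrative.

# Minimal sketch: load the provider config, resolve the $OPENROUTER_API_KEY
# placeholder from the environment, and issue one chat completion request
# against the configured endpoint using the default large model.
import json
import os

import requests  # assumed third-party HTTP client

with open("openrouter.json") as f:  # assumed file location
    provider = json.load(f)

api_key = os.environ[provider["api_key"].lstrip("$")]
url = provider["api_endpoint"].rstrip("/") + "/chat/completions"  # assumes OpenAI-compatible route

resp = requests.post(
    url,
    headers={"Authorization": f"Bearer {api_key}"},
    json={
        "model": provider["default_large_model_id"],
        "messages": [{"role": "user", "content": "Hello"}],
    },
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["choices"][0]["message"]["content"])
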
   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false,
  21      "options": {}
  22    },
  23    {
  24      "id": "ai21/jamba-mini-1.7",
  25      "name": "AI21: Jamba Mini 1.7",
  26      "cost_per_1m_in": 0.2,
  27      "cost_per_1m_out": 0.4,
  28      "cost_per_1m_in_cached": 0,
  29      "cost_per_1m_out_cached": 0,
  30      "context_window": 256000,
  31      "default_max_tokens": 2048,
  32      "can_reason": false,
  33      "supports_attachments": false,
  34      "options": {}
  35    },
  36    {
  37      "id": "allenai/olmo-3.1-32b-instruct",
  38      "name": "AllenAI: Olmo 3.1 32B Instruct",
  39      "cost_per_1m_in": 0.2,
  40      "cost_per_1m_out": 0.6,
  41      "cost_per_1m_in_cached": 0,
  42      "cost_per_1m_out_cached": 0,
  43      "context_window": 65536,
  44      "default_max_tokens": 6553,
  45      "can_reason": false,
  46      "supports_attachments": false,
  47      "options": {}
  48    },
  49    {
  50      "id": "amazon/nova-2-lite-v1",
  51      "name": "Amazon: Nova 2 Lite",
  52      "cost_per_1m_in": 0.3,
  53      "cost_per_1m_out": 2.5,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 1000000,
  57      "default_max_tokens": 32767,
  58      "can_reason": true,
  59      "reasoning_levels": [
  60        "low",
  61        "medium",
  62        "high"
  63      ],
  64      "default_reasoning_effort": "medium",
  65      "supports_attachments": true,
  66      "options": {}
  67    },
  68    {
  69      "id": "amazon/nova-lite-v1",
  70      "name": "Amazon: Nova Lite 1.0",
  71      "cost_per_1m_in": 0.06,
  72      "cost_per_1m_out": 0.24,
  73      "cost_per_1m_in_cached": 0,
  74      "cost_per_1m_out_cached": 0,
  75      "context_window": 300000,
  76      "default_max_tokens": 2560,
  77      "can_reason": false,
  78      "supports_attachments": true,
  79      "options": {}
  80    },
  81    {
  82      "id": "amazon/nova-micro-v1",
  83      "name": "Amazon: Nova Micro 1.0",
  84      "cost_per_1m_in": 0.035,
  85      "cost_per_1m_out": 0.14,
  86      "cost_per_1m_in_cached": 0,
  87      "cost_per_1m_out_cached": 0,
  88      "context_window": 128000,
  89      "default_max_tokens": 2560,
  90      "can_reason": false,
  91      "supports_attachments": false,
  92      "options": {}
  93    },
  94    {
  95      "id": "amazon/nova-premier-v1",
  96      "name": "Amazon: Nova Premier 1.0",
  97      "cost_per_1m_in": 2.5,
  98      "cost_per_1m_out": 12.5,
  99      "cost_per_1m_in_cached": 0,
 100      "cost_per_1m_out_cached": 0.625,
 101      "context_window": 1000000,
 102      "default_max_tokens": 16000,
 103      "can_reason": false,
 104      "supports_attachments": true,
 105      "options": {}
 106    },
 107    {
 108      "id": "amazon/nova-pro-v1",
 109      "name": "Amazon: Nova Pro 1.0",
 110      "cost_per_1m_in": 0.8,
 111      "cost_per_1m_out": 3.2,
 112      "cost_per_1m_in_cached": 0,
 113      "cost_per_1m_out_cached": 0,
 114      "context_window": 300000,
 115      "default_max_tokens": 2560,
 116      "can_reason": false,
 117      "supports_attachments": true,
 118      "options": {}
 119    },
 120    {
 121      "id": "anthropic/claude-3-haiku",
 122      "name": "Anthropic: Claude 3 Haiku",
 123      "cost_per_1m_in": 0.25,
 124      "cost_per_1m_out": 1.25,
 125      "cost_per_1m_in_cached": 0.3,
 126      "cost_per_1m_out_cached": 0.03,
 127      "context_window": 200000,
 128      "default_max_tokens": 2048,
 129      "can_reason": false,
 130      "supports_attachments": true,
 131      "options": {}
 132    },
 133    {
 134      "id": "anthropic/claude-3.5-haiku",
 135      "name": "Anthropic: Claude 3.5 Haiku",
 136      "cost_per_1m_in": 0.8,
 137      "cost_per_1m_out": 4,
 138      "cost_per_1m_in_cached": 1,
 139      "cost_per_1m_out_cached": 0.08,
 140      "context_window": 200000,
 141      "default_max_tokens": 4096,
 142      "can_reason": false,
 143      "supports_attachments": true,
 144      "options": {}
 145    },
 146    {
 147      "id": "anthropic/claude-3.5-sonnet",
 148      "name": "Anthropic: Claude 3.5 Sonnet",
 149      "cost_per_1m_in": 6,
 150      "cost_per_1m_out": 30,
 151      "cost_per_1m_in_cached": 0,
 152      "cost_per_1m_out_cached": 0,
 153      "context_window": 200000,
 154      "default_max_tokens": 4096,
 155      "can_reason": false,
 156      "supports_attachments": true,
 157      "options": {}
 158    },
 159    {
 160      "id": "anthropic/claude-3.7-sonnet",
 161      "name": "Anthropic: Claude 3.7 Sonnet",
 162      "cost_per_1m_in": 3,
 163      "cost_per_1m_out": 15,
 164      "cost_per_1m_in_cached": 3.75,
 165      "cost_per_1m_out_cached": 0.3,
 166      "context_window": 200000,
 167      "default_max_tokens": 64000,
 168      "can_reason": true,
 169      "reasoning_levels": [
 170        "low",
 171        "medium",
 172        "high"
 173      ],
 174      "default_reasoning_effort": "medium",
 175      "supports_attachments": true,
 176      "options": {}
 177    },
 178    {
 179      "id": "anthropic/claude-3.7-sonnet:thinking",
 180      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 181      "cost_per_1m_in": 3,
 182      "cost_per_1m_out": 15,
 183      "cost_per_1m_in_cached": 3.75,
 184      "cost_per_1m_out_cached": 0.3,
 185      "context_window": 200000,
 186      "default_max_tokens": 32000,
 187      "can_reason": true,
 188      "reasoning_levels": [
 189        "low",
 190        "medium",
 191        "high"
 192      ],
 193      "default_reasoning_effort": "medium",
 194      "supports_attachments": true,
 195      "options": {}
 196    },
 197    {
 198      "id": "anthropic/claude-haiku-4.5",
 199      "name": "Anthropic: Claude Haiku 4.5",
 200      "cost_per_1m_in": 1,
 201      "cost_per_1m_out": 5,
 202      "cost_per_1m_in_cached": 1.25,
 203      "cost_per_1m_out_cached": 0.1,
 204      "context_window": 200000,
 205      "default_max_tokens": 32000,
 206      "can_reason": true,
 207      "reasoning_levels": [
 208        "low",
 209        "medium",
 210        "high"
 211      ],
 212      "default_reasoning_effort": "medium",
 213      "supports_attachments": true,
 214      "options": {}
 215    },
 216    {
 217      "id": "anthropic/claude-opus-4",
 218      "name": "Anthropic: Claude Opus 4",
 219      "cost_per_1m_in": 15,
 220      "cost_per_1m_out": 75,
 221      "cost_per_1m_in_cached": 18.75,
 222      "cost_per_1m_out_cached": 1.5,
 223      "context_window": 200000,
 224      "default_max_tokens": 16000,
 225      "can_reason": true,
 226      "reasoning_levels": [
 227        "low",
 228        "medium",
 229        "high"
 230      ],
 231      "default_reasoning_effort": "medium",
 232      "supports_attachments": true,
 233      "options": {}
 234    },
 235    {
 236      "id": "anthropic/claude-opus-4.1",
 237      "name": "Anthropic: Claude Opus 4.1",
 238      "cost_per_1m_in": 15,
 239      "cost_per_1m_out": 75,
 240      "cost_per_1m_in_cached": 18.75,
 241      "cost_per_1m_out_cached": 1.5,
 242      "context_window": 200000,
 243      "default_max_tokens": 16000,
 244      "can_reason": true,
 245      "reasoning_levels": [
 246        "low",
 247        "medium",
 248        "high"
 249      ],
 250      "default_reasoning_effort": "medium",
 251      "supports_attachments": true,
 252      "options": {}
 253    },
 254    {
 255      "id": "anthropic/claude-opus-4.5",
 256      "name": "Anthropic: Claude Opus 4.5",
 257      "cost_per_1m_in": 5,
 258      "cost_per_1m_out": 25,
 259      "cost_per_1m_in_cached": 6.25,
 260      "cost_per_1m_out_cached": 0.5,
 261      "context_window": 200000,
 262      "default_max_tokens": 32000,
 263      "can_reason": true,
 264      "reasoning_levels": [
 265        "low",
 266        "medium",
 267        "high"
 268      ],
 269      "default_reasoning_effort": "medium",
 270      "supports_attachments": true,
 271      "options": {}
 272    },
 273    {
 274      "id": "anthropic/claude-sonnet-4",
 275      "name": "Anthropic: Claude Sonnet 4",
 276      "cost_per_1m_in": 3,
 277      "cost_per_1m_out": 15,
 278      "cost_per_1m_in_cached": 3.75,
 279      "cost_per_1m_out_cached": 0.3,
 280      "context_window": 1000000,
 281      "default_max_tokens": 32000,
 282      "can_reason": true,
 283      "reasoning_levels": [
 284        "low",
 285        "medium",
 286        "high"
 287      ],
 288      "default_reasoning_effort": "medium",
 289      "supports_attachments": true,
 290      "options": {}
 291    },
 292    {
 293      "id": "anthropic/claude-sonnet-4.5",
 294      "name": "Anthropic: Claude Sonnet 4.5",
 295      "cost_per_1m_in": 3,
 296      "cost_per_1m_out": 15,
 297      "cost_per_1m_in_cached": 3.75,
 298      "cost_per_1m_out_cached": 0.3,
 299      "context_window": 1000000,
 300      "default_max_tokens": 32000,
 301      "can_reason": true,
 302      "reasoning_levels": [
 303        "low",
 304        "medium",
 305        "high"
 306      ],
 307      "default_reasoning_effort": "medium",
 308      "supports_attachments": true,
 309      "options": {}
 310    },
 311    {
 312      "id": "arcee-ai/trinity-mini",
 313      "name": "Arcee AI: Trinity Mini",
 314      "cost_per_1m_in": 0.045,
 315      "cost_per_1m_out": 0.15,
 316      "cost_per_1m_in_cached": 0,
 317      "cost_per_1m_out_cached": 0,
 318      "context_window": 131072,
 319      "default_max_tokens": 65536,
 320      "can_reason": true,
 321      "reasoning_levels": [
 322        "low",
 323        "medium",
 324        "high"
 325      ],
 326      "default_reasoning_effort": "medium",
 327      "supports_attachments": false,
 328      "options": {}
 329    },
 330    {
 331      "id": "arcee-ai/trinity-mini:free",
 332      "name": "Arcee AI: Trinity Mini (free)",
 333      "cost_per_1m_in": 0,
 334      "cost_per_1m_out": 0,
 335      "cost_per_1m_in_cached": 0,
 336      "cost_per_1m_out_cached": 0,
 337      "context_window": 131072,
 338      "default_max_tokens": 13107,
 339      "can_reason": true,
 340      "reasoning_levels": [
 341        "low",
 342        "medium",
 343        "high"
 344      ],
 345      "default_reasoning_effort": "medium",
 346      "supports_attachments": false,
 347      "options": {}
 348    },
 349    {
 350      "id": "arcee-ai/virtuoso-large",
 351      "name": "Arcee AI: Virtuoso Large",
 352      "cost_per_1m_in": 0.75,
 353      "cost_per_1m_out": 1.2,
 354      "cost_per_1m_in_cached": 0,
 355      "cost_per_1m_out_cached": 0,
 356      "context_window": 131072,
 357      "default_max_tokens": 32000,
 358      "can_reason": false,
 359      "supports_attachments": false,
 360      "options": {}
 361    },
 362    {
 363      "id": "baidu/ernie-4.5-21b-a3b",
 364      "name": "Baidu: ERNIE 4.5 21B A3B",
 365      "cost_per_1m_in": 0.07,
 366      "cost_per_1m_out": 0.28,
 367      "cost_per_1m_in_cached": 0,
 368      "cost_per_1m_out_cached": 0,
 369      "context_window": 120000,
 370      "default_max_tokens": 4000,
 371      "can_reason": false,
 372      "supports_attachments": false,
 373      "options": {}
 374    },
 375    {
 376      "id": "baidu/ernie-4.5-vl-28b-a3b",
 377      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 378      "cost_per_1m_in": 0.14,
 379      "cost_per_1m_out": 0.56,
 380      "cost_per_1m_in_cached": 0,
 381      "cost_per_1m_out_cached": 0,
 382      "context_window": 30000,
 383      "default_max_tokens": 4000,
 384      "can_reason": true,
 385      "reasoning_levels": [
 386        "low",
 387        "medium",
 388        "high"
 389      ],
 390      "default_reasoning_effort": "medium",
 391      "supports_attachments": true,
 392      "options": {}
 393    },
 394    {
 395      "id": "bytedance-seed/seed-1.6",
 396      "name": "ByteDance Seed: Seed 1.6",
 397      "cost_per_1m_in": 0.25,
 398      "cost_per_1m_out": 2,
 399      "cost_per_1m_in_cached": 0,
 400      "cost_per_1m_out_cached": 0,
 401      "context_window": 262144,
 402      "default_max_tokens": 16384,
 403      "can_reason": true,
 404      "reasoning_levels": [
 405        "low",
 406        "medium",
 407        "high"
 408      ],
 409      "default_reasoning_effort": "medium",
 410      "supports_attachments": true,
 411      "options": {}
 412    },
 413    {
 414      "id": "bytedance-seed/seed-1.6-flash",
 415      "name": "ByteDance Seed: Seed 1.6 Flash",
 416      "cost_per_1m_in": 0.075,
 417      "cost_per_1m_out": 0.3,
 418      "cost_per_1m_in_cached": 0,
 419      "cost_per_1m_out_cached": 0,
 420      "context_window": 262144,
 421      "default_max_tokens": 8192,
 422      "can_reason": true,
 423      "reasoning_levels": [
 424        "low",
 425        "medium",
 426        "high"
 427      ],
 428      "default_reasoning_effort": "medium",
 429      "supports_attachments": true,
 430      "options": {}
 431    },
 432    {
 433      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
 434      "name": "Cogito V2 Preview Llama 109B",
 435      "cost_per_1m_in": 0.18,
 436      "cost_per_1m_out": 0.59,
 437      "cost_per_1m_in_cached": 0,
 438      "cost_per_1m_out_cached": 0,
 439      "context_window": 32767,
 440      "default_max_tokens": 3276,
 441      "can_reason": true,
 442      "reasoning_levels": [
 443        "low",
 444        "medium",
 445        "high"
 446      ],
 447      "default_reasoning_effort": "medium",
 448      "supports_attachments": true,
 449      "options": {}
 450    },
 451    {
 452      "id": "cohere/command-r-08-2024",
 453      "name": "Cohere: Command R (08-2024)",
 454      "cost_per_1m_in": 0.15,
 455      "cost_per_1m_out": 0.6,
 456      "cost_per_1m_in_cached": 0,
 457      "cost_per_1m_out_cached": 0,
 458      "context_window": 128000,
 459      "default_max_tokens": 2000,
 460      "can_reason": false,
 461      "supports_attachments": false,
 462      "options": {}
 463    },
 464    {
 465      "id": "cohere/command-r-plus-08-2024",
 466      "name": "Cohere: Command R+ (08-2024)",
 467      "cost_per_1m_in": 2.5,
 468      "cost_per_1m_out": 10,
 469      "cost_per_1m_in_cached": 0,
 470      "cost_per_1m_out_cached": 0,
 471      "context_window": 128000,
 472      "default_max_tokens": 2000,
 473      "can_reason": false,
 474      "supports_attachments": false,
 475      "options": {}
 476    },
 477    {
 478      "id": "deepcogito/cogito-v2-preview-llama-405b",
 479      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
 480      "cost_per_1m_in": 3.5,
 481      "cost_per_1m_out": 3.5,
 482      "cost_per_1m_in_cached": 0,
 483      "cost_per_1m_out_cached": 0,
 484      "context_window": 32768,
 485      "default_max_tokens": 3276,
 486      "can_reason": true,
 487      "reasoning_levels": [
 488        "low",
 489        "medium",
 490        "high"
 491      ],
 492      "default_reasoning_effort": "medium",
 493      "supports_attachments": false,
 494      "options": {}
 495    },
 496    {
 497      "id": "deepcogito/cogito-v2-preview-llama-70b",
 498      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
 499      "cost_per_1m_in": 0.88,
 500      "cost_per_1m_out": 0.88,
 501      "cost_per_1m_in_cached": 0,
 502      "cost_per_1m_out_cached": 0,
 503      "context_window": 32768,
 504      "default_max_tokens": 3276,
 505      "can_reason": true,
 506      "reasoning_levels": [
 507        "low",
 508        "medium",
 509        "high"
 510      ],
 511      "default_reasoning_effort": "medium",
 512      "supports_attachments": false,
 513      "options": {}
 514    },
 515    {
 516      "id": "deepseek/deepseek-chat",
 517      "name": "DeepSeek: DeepSeek V3",
 518      "cost_per_1m_in": 0.4,
 519      "cost_per_1m_out": 1.3,
 520      "cost_per_1m_in_cached": 0,
 521      "cost_per_1m_out_cached": 0,
 522      "context_window": 64000,
 523      "default_max_tokens": 8000,
 524      "can_reason": false,
 525      "supports_attachments": false,
 526      "options": {}
 527    },
 528    {
 529      "id": "deepseek/deepseek-chat-v3-0324",
 530      "name": "DeepSeek: DeepSeek V3 0324",
 531      "cost_per_1m_in": 0.77,
 532      "cost_per_1m_out": 0.77,
 533      "cost_per_1m_in_cached": 0,
 534      "cost_per_1m_out_cached": 0,
 535      "context_window": 163840,
 536      "default_max_tokens": 65536,
 537      "can_reason": true,
 538      "reasoning_levels": [
 539        "low",
 540        "medium",
 541        "high"
 542      ],
 543      "default_reasoning_effort": "medium",
 544      "supports_attachments": false,
 545      "options": {}
 546    },
 547    {
 548      "id": "deepseek/deepseek-chat-v3.1",
 549      "name": "DeepSeek: DeepSeek V3.1",
 550      "cost_per_1m_in": 0.21,
 551      "cost_per_1m_out": 0.79,
 552      "cost_per_1m_in_cached": 0,
 553      "cost_per_1m_out_cached": 0.168,
 554      "context_window": 163840,
 555      "default_max_tokens": 16384,
 556      "can_reason": true,
 557      "reasoning_levels": [
 558        "low",
 559        "medium",
 560        "high"
 561      ],
 562      "default_reasoning_effort": "medium",
 563      "supports_attachments": false,
 564      "options": {}
 565    },
 566    {
 567      "id": "deepseek/deepseek-v3.1-terminus",
 568      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 569      "cost_per_1m_in": 0.23,
 570      "cost_per_1m_out": 0.9,
 571      "cost_per_1m_in_cached": 0,
 572      "cost_per_1m_out_cached": 0,
 573      "context_window": 163840,
 574      "default_max_tokens": 32768,
 575      "can_reason": true,
 576      "reasoning_levels": [
 577        "low",
 578        "medium",
 579        "high"
 580      ],
 581      "default_reasoning_effort": "medium",
 582      "supports_attachments": false,
 583      "options": {}
 584    },
 585    {
 586      "id": "deepseek/deepseek-v3.1-terminus:exacto",
 587      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
 588      "cost_per_1m_in": 0.21,
 589      "cost_per_1m_out": 0.79,
 590      "cost_per_1m_in_cached": 0,
 591      "cost_per_1m_out_cached": 0.168,
 592      "context_window": 163840,
 593      "default_max_tokens": 16384,
 594      "can_reason": true,
 595      "reasoning_levels": [
 596        "low",
 597        "medium",
 598        "high"
 599      ],
 600      "default_reasoning_effort": "medium",
 601      "supports_attachments": false,
 602      "options": {}
 603    },
 604    {
 605      "id": "deepseek/deepseek-v3.2",
 606      "name": "DeepSeek: DeepSeek V3.2",
 607      "cost_per_1m_in": 0.26,
 608      "cost_per_1m_out": 0.39,
 609      "cost_per_1m_in_cached": 0,
 610      "cost_per_1m_out_cached": 0.13,
 611      "context_window": 163840,
 612      "default_max_tokens": 16384,
 613      "can_reason": true,
 614      "reasoning_levels": [
 615        "low",
 616        "medium",
 617        "high"
 618      ],
 619      "default_reasoning_effort": "medium",
 620      "supports_attachments": false,
 621      "options": {}
 622    },
 623    {
 624      "id": "deepseek/deepseek-v3.2-exp",
 625      "name": "DeepSeek: DeepSeek V3.2 Exp",
 626      "cost_per_1m_in": 0.27,
 627      "cost_per_1m_out": 0.41,
 628      "cost_per_1m_in_cached": 0,
 629      "cost_per_1m_out_cached": 0,
 630      "context_window": 163840,
 631      "default_max_tokens": 32768,
 632      "can_reason": true,
 633      "reasoning_levels": [
 634        "low",
 635        "medium",
 636        "high"
 637      ],
 638      "default_reasoning_effort": "medium",
 639      "supports_attachments": false,
 640      "options": {}
 641    },
 642    {
 643      "id": "deepseek/deepseek-r1",
 644      "name": "DeepSeek: R1",
 645      "cost_per_1m_in": 0.7,
 646      "cost_per_1m_out": 2.5,
 647      "cost_per_1m_in_cached": 0,
 648      "cost_per_1m_out_cached": 0,
 649      "context_window": 64000,
 650      "default_max_tokens": 8000,
 651      "can_reason": true,
 652      "reasoning_levels": [
 653        "low",
 654        "medium",
 655        "high"
 656      ],
 657      "default_reasoning_effort": "medium",
 658      "supports_attachments": false,
 659      "options": {}
 660    },
 661    {
 662      "id": "deepseek/deepseek-r1-0528",
 663      "name": "DeepSeek: R1 0528",
 664      "cost_per_1m_in": 0.8,
 665      "cost_per_1m_out": 2.4,
 666      "cost_per_1m_in_cached": 0,
 667      "cost_per_1m_out_cached": 0,
 668      "context_window": 163840,
 669      "default_max_tokens": 16384,
 670      "can_reason": true,
 671      "reasoning_levels": [
 672        "low",
 673        "medium",
 674        "high"
 675      ],
 676      "default_reasoning_effort": "medium",
 677      "supports_attachments": false,
 678      "options": {}
 679    },
 680    {
 681      "id": "deepseek/deepseek-r1-distill-llama-70b",
 682      "name": "DeepSeek: R1 Distill Llama 70B",
 683      "cost_per_1m_in": 0.03,
 684      "cost_per_1m_out": 0.11,
 685      "cost_per_1m_in_cached": 0,
 686      "cost_per_1m_out_cached": 0,
 687      "context_window": 131072,
 688      "default_max_tokens": 65536,
 689      "can_reason": true,
 690      "reasoning_levels": [
 691        "low",
 692        "medium",
 693        "high"
 694      ],
 695      "default_reasoning_effort": "medium",
 696      "supports_attachments": false,
 697      "options": {}
 698    },
 699    {
 700      "id": "google/gemini-2.0-flash-001",
 701      "name": "Google: Gemini 2.0 Flash",
 702      "cost_per_1m_in": 0.1,
 703      "cost_per_1m_out": 0.4,
 704      "cost_per_1m_in_cached": 0.1833,
 705      "cost_per_1m_out_cached": 0.025,
 706      "context_window": 1048576,
 707      "default_max_tokens": 4096,
 708      "can_reason": false,
 709      "supports_attachments": true,
 710      "options": {}
 711    },
 712    {
 713      "id": "google/gemini-2.0-flash-exp:free",
 714      "name": "Google: Gemini 2.0 Flash Experimental (free)",
 715      "cost_per_1m_in": 0,
 716      "cost_per_1m_out": 0,
 717      "cost_per_1m_in_cached": 0,
 718      "cost_per_1m_out_cached": 0,
 719      "context_window": 1048576,
 720      "default_max_tokens": 4096,
 721      "can_reason": false,
 722      "supports_attachments": true,
 723      "options": {}
 724    },
 725    {
 726      "id": "google/gemini-2.0-flash-lite-001",
 727      "name": "Google: Gemini 2.0 Flash Lite",
 728      "cost_per_1m_in": 0.075,
 729      "cost_per_1m_out": 0.3,
 730      "cost_per_1m_in_cached": 0,
 731      "cost_per_1m_out_cached": 0,
 732      "context_window": 1048576,
 733      "default_max_tokens": 4096,
 734      "can_reason": false,
 735      "supports_attachments": true,
 736      "options": {}
 737    },
 738    {
 739      "id": "google/gemini-2.5-flash",
 740      "name": "Google: Gemini 2.5 Flash",
 741      "cost_per_1m_in": 0.3,
 742      "cost_per_1m_out": 2.5,
 743      "cost_per_1m_in_cached": 0.3833,
 744      "cost_per_1m_out_cached": 0.03,
 745      "context_window": 1048576,
 746      "default_max_tokens": 32767,
 747      "can_reason": true,
 748      "reasoning_levels": [
 749        "low",
 750        "medium",
 751        "high"
 752      ],
 753      "default_reasoning_effort": "medium",
 754      "supports_attachments": true,
 755      "options": {}
 756    },
 757    {
 758      "id": "google/gemini-2.5-flash-lite",
 759      "name": "Google: Gemini 2.5 Flash Lite",
 760      "cost_per_1m_in": 0.1,
 761      "cost_per_1m_out": 0.4,
 762      "cost_per_1m_in_cached": 0.1833,
 763      "cost_per_1m_out_cached": 0.01,
 764      "context_window": 1048576,
 765      "default_max_tokens": 32767,
 766      "can_reason": true,
 767      "reasoning_levels": [
 768        "low",
 769        "medium",
 770        "high"
 771      ],
 772      "default_reasoning_effort": "medium",
 773      "supports_attachments": true,
 774      "options": {}
 775    },
 776    {
 777      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 778      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 779      "cost_per_1m_in": 0.1,
 780      "cost_per_1m_out": 0.4,
 781      "cost_per_1m_in_cached": 1,
 782      "cost_per_1m_out_cached": 0.01,
 783      "context_window": 1048576,
 784      "default_max_tokens": 32767,
 785      "can_reason": true,
 786      "reasoning_levels": [
 787        "low",
 788        "medium",
 789        "high"
 790      ],
 791      "default_reasoning_effort": "medium",
 792      "supports_attachments": true,
 793      "options": {}
 794    },
 795    {
 796      "id": "google/gemini-2.5-flash-preview-09-2025",
 797      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
 798      "cost_per_1m_in": 0.3,
 799      "cost_per_1m_out": 2.5,
 800      "cost_per_1m_in_cached": 0.3833,
 801      "cost_per_1m_out_cached": 0.075,
 802      "context_window": 1048576,
 803      "default_max_tokens": 32768,
 804      "can_reason": true,
 805      "reasoning_levels": [
 806        "low",
 807        "medium",
 808        "high"
 809      ],
 810      "default_reasoning_effort": "medium",
 811      "supports_attachments": true,
 812      "options": {}
 813    },
 814    {
 815      "id": "google/gemini-2.5-pro",
 816      "name": "Google: Gemini 2.5 Pro",
 817      "cost_per_1m_in": 1.25,
 818      "cost_per_1m_out": 10,
 819      "cost_per_1m_in_cached": 1.625,
 820      "cost_per_1m_out_cached": 0.125,
 821      "context_window": 1048576,
 822      "default_max_tokens": 32768,
 823      "can_reason": true,
 824      "reasoning_levels": [
 825        "low",
 826        "medium",
 827        "high"
 828      ],
 829      "default_reasoning_effort": "medium",
 830      "supports_attachments": true,
 831      "options": {}
 832    },
 833    {
 834      "id": "google/gemini-2.5-pro-preview-05-06",
 835      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 836      "cost_per_1m_in": 1.25,
 837      "cost_per_1m_out": 10,
 838      "cost_per_1m_in_cached": 1.625,
 839      "cost_per_1m_out_cached": 0.125,
 840      "context_window": 1048576,
 841      "default_max_tokens": 32768,
 842      "can_reason": true,
 843      "reasoning_levels": [
 844        "low",
 845        "medium",
 846        "high"
 847      ],
 848      "default_reasoning_effort": "medium",
 849      "supports_attachments": true,
 850      "options": {}
 851    },
 852    {
 853      "id": "google/gemini-2.5-pro-preview",
 854      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 855      "cost_per_1m_in": 1.25,
 856      "cost_per_1m_out": 10,
 857      "cost_per_1m_in_cached": 1.625,
 858      "cost_per_1m_out_cached": 0.125,
 859      "context_window": 1048576,
 860      "default_max_tokens": 32768,
 861      "can_reason": true,
 862      "reasoning_levels": [
 863        "low",
 864        "medium",
 865        "high"
 866      ],
 867      "default_reasoning_effort": "medium",
 868      "supports_attachments": true,
 869      "options": {}
 870    },
 871    {
 872      "id": "google/gemini-3-flash-preview",
 873      "name": "Google: Gemini 3 Flash Preview",
 874      "cost_per_1m_in": 0.5,
 875      "cost_per_1m_out": 3,
 876      "cost_per_1m_in_cached": 0,
 877      "cost_per_1m_out_cached": 0.05,
 878      "context_window": 1048576,
 879      "default_max_tokens": 32767,
 880      "can_reason": true,
 881      "reasoning_levels": [
 882        "low",
 883        "medium",
 884        "high"
 885      ],
 886      "default_reasoning_effort": "medium",
 887      "supports_attachments": true,
 888      "options": {}
 889    },
 890    {
 891      "id": "google/gemini-3-pro-preview",
 892      "name": "Google: Gemini 3 Pro Preview",
 893      "cost_per_1m_in": 2,
 894      "cost_per_1m_out": 12,
 895      "cost_per_1m_in_cached": 2.375,
 896      "cost_per_1m_out_cached": 0.2,
 897      "context_window": 1048576,
 898      "default_max_tokens": 32768,
 899      "can_reason": true,
 900      "reasoning_levels": [
 901        "low",
 902        "medium",
 903        "high"
 904      ],
 905      "default_reasoning_effort": "medium",
 906      "supports_attachments": true,
 907      "options": {}
 908    },
 909    {
 910      "id": "inception/mercury",
 911      "name": "Inception: Mercury",
 912      "cost_per_1m_in": 0.25,
 913      "cost_per_1m_out": 1,
 914      "cost_per_1m_in_cached": 0,
 915      "cost_per_1m_out_cached": 0,
 916      "context_window": 128000,
 917      "default_max_tokens": 8192,
 918      "can_reason": false,
 919      "supports_attachments": false,
 920      "options": {}
 921    },
 922    {
 923      "id": "inception/mercury-coder",
 924      "name": "Inception: Mercury Coder",
 925      "cost_per_1m_in": 0.25,
 926      "cost_per_1m_out": 1,
 927      "cost_per_1m_in_cached": 0,
 928      "cost_per_1m_out_cached": 0,
 929      "context_window": 128000,
 930      "default_max_tokens": 8192,
 931      "can_reason": false,
 932      "supports_attachments": false,
 933      "options": {}
 934    },
 935    {
 936      "id": "kwaipilot/kat-coder-pro",
 937      "name": "Kwaipilot: KAT-Coder-Pro V1",
 938      "cost_per_1m_in": 0.207,
 939      "cost_per_1m_out": 0.828,
 940      "cost_per_1m_in_cached": 0,
 941      "cost_per_1m_out_cached": 0.0414,
 942      "context_window": 256000,
 943      "default_max_tokens": 64000,
 944      "can_reason": false,
 945      "supports_attachments": false,
 946      "options": {}
 947    },
 948    {
 949      "id": "meta-llama/llama-3.1-70b-instruct",
 950      "name": "Meta: Llama 3.1 70B Instruct",
 951      "cost_per_1m_in": 0.88,
 952      "cost_per_1m_out": 0.88,
 953      "cost_per_1m_in_cached": 0,
 954      "cost_per_1m_out_cached": 0,
 955      "context_window": 131072,
 956      "default_max_tokens": 13107,
 957      "can_reason": false,
 958      "supports_attachments": false,
 959      "options": {}
 960    },
 961    {
 962      "id": "meta-llama/llama-3.3-70b-instruct",
 963      "name": "Meta: Llama 3.3 70B Instruct",
 964      "cost_per_1m_in": 0.25,
 965      "cost_per_1m_out": 0.75,
 966      "cost_per_1m_in_cached": 0,
 967      "cost_per_1m_out_cached": 0,
 968      "context_window": 131072,
 969      "default_max_tokens": 13107,
 970      "can_reason": false,
 971      "supports_attachments": false,
 972      "options": {}
 973    },
 974    {
 975      "id": "meta-llama/llama-3.3-70b-instruct:free",
 976      "name": "Meta: Llama 3.3 70B Instruct (free)",
 977      "cost_per_1m_in": 0,
 978      "cost_per_1m_out": 0,
 979      "cost_per_1m_in_cached": 0,
 980      "cost_per_1m_out_cached": 0,
 981      "context_window": 131072,
 982      "default_max_tokens": 13107,
 983      "can_reason": false,
 984      "supports_attachments": false,
 985      "options": {}
 986    },
 987    {
 988      "id": "meta-llama/llama-4-maverick",
 989      "name": "Meta: Llama 4 Maverick",
 990      "cost_per_1m_in": 0.22,
 991      "cost_per_1m_out": 0.88,
 992      "cost_per_1m_in_cached": 0,
 993      "cost_per_1m_out_cached": 0,
 994      "context_window": 1048576,
 995      "default_max_tokens": 104857,
 996      "can_reason": false,
 997      "supports_attachments": true,
 998      "options": {}
 999    },
1000    {
1001      "id": "meta-llama/llama-4-scout",
1002      "name": "Meta: Llama 4 Scout",
1003      "cost_per_1m_in": 0.25,
1004      "cost_per_1m_out": 0.7,
1005      "cost_per_1m_in_cached": 0,
1006      "cost_per_1m_out_cached": 0,
1007      "context_window": 1310720,
1008      "default_max_tokens": 4096,
1009      "can_reason": false,
1010      "supports_attachments": true,
1011      "options": {}
1012    },
1013    {
1014      "id": "minimax/minimax-m2",
1015      "name": "MiniMax: MiniMax M2",
1016      "cost_per_1m_in": 0.3,
1017      "cost_per_1m_out": 1.2,
1018      "cost_per_1m_in_cached": 0.375,
1019      "cost_per_1m_out_cached": 0.03,
1020      "context_window": 204800,
1021      "default_max_tokens": 65536,
1022      "can_reason": true,
1023      "reasoning_levels": [
1024        "low",
1025        "medium",
1026        "high"
1027      ],
1028      "default_reasoning_effort": "medium",
1029      "supports_attachments": false,
1030      "options": {}
1031    },
1032    {
1033      "id": "minimax/minimax-m2.1",
1034      "name": "MiniMax: MiniMax M2.1",
1035      "cost_per_1m_in": 0.3,
1036      "cost_per_1m_out": 1.2,
1037      "cost_per_1m_in_cached": 0.375,
1038      "cost_per_1m_out_cached": 0.03,
1039      "context_window": 204800,
1040      "default_max_tokens": 65536,
1041      "can_reason": true,
1042      "reasoning_levels": [
1043        "low",
1044        "medium",
1045        "high"
1046      ],
1047      "default_reasoning_effort": "medium",
1048      "supports_attachments": false,
1049      "options": {}
1050    },
1051    {
1052      "id": "mistralai/mistral-large",
1053      "name": "Mistral Large",
1054      "cost_per_1m_in": 2,
1055      "cost_per_1m_out": 6,
1056      "cost_per_1m_in_cached": 0,
1057      "cost_per_1m_out_cached": 0,
1058      "context_window": 128000,
1059      "default_max_tokens": 12800,
1060      "can_reason": false,
1061      "supports_attachments": false,
1062      "options": {}
1063    },
1064    {
1065      "id": "mistralai/mistral-large-2407",
1066      "name": "Mistral Large 2407",
1067      "cost_per_1m_in": 2,
1068      "cost_per_1m_out": 6,
1069      "cost_per_1m_in_cached": 0,
1070      "cost_per_1m_out_cached": 0,
1071      "context_window": 131072,
1072      "default_max_tokens": 13107,
1073      "can_reason": false,
1074      "supports_attachments": false,
1075      "options": {}
1076    },
1077    {
1078      "id": "mistralai/mistral-large-2411",
1079      "name": "Mistral Large 2411",
1080      "cost_per_1m_in": 2,
1081      "cost_per_1m_out": 6,
1082      "cost_per_1m_in_cached": 0,
1083      "cost_per_1m_out_cached": 0,
1084      "context_window": 131072,
1085      "default_max_tokens": 13107,
1086      "can_reason": false,
1087      "supports_attachments": false,
1088      "options": {}
1089    },
1090    {
1091      "id": "mistralai/mistral-tiny",
1092      "name": "Mistral Tiny",
1093      "cost_per_1m_in": 0.25,
1094      "cost_per_1m_out": 0.25,
1095      "cost_per_1m_in_cached": 0,
1096      "cost_per_1m_out_cached": 0,
1097      "context_window": 32768,
1098      "default_max_tokens": 3276,
1099      "can_reason": false,
1100      "supports_attachments": false,
1101      "options": {}
1102    },
1103    {
1104      "id": "mistralai/codestral-2508",
1105      "name": "Mistral: Codestral 2508",
1106      "cost_per_1m_in": 0.3,
1107      "cost_per_1m_out": 0.9,
1108      "cost_per_1m_in_cached": 0,
1109      "cost_per_1m_out_cached": 0,
1110      "context_window": 256000,
1111      "default_max_tokens": 25600,
1112      "can_reason": false,
1113      "supports_attachments": false,
1114      "options": {}
1115    },
1116    {
1117      "id": "mistralai/devstral-2512",
1118      "name": "Mistral: Devstral 2 2512",
1119      "cost_per_1m_in": 0.05,
1120      "cost_per_1m_out": 0.22,
1121      "cost_per_1m_in_cached": 0,
1122      "cost_per_1m_out_cached": 0,
1123      "context_window": 262144,
1124      "default_max_tokens": 32768,
1125      "can_reason": false,
1126      "supports_attachments": false,
1127      "options": {}
1128    },
1129    {
1130      "id": "mistralai/devstral-2512:free",
1131      "name": "Mistral: Devstral 2 2512 (free)",
1132      "cost_per_1m_in": 0,
1133      "cost_per_1m_out": 0,
1134      "cost_per_1m_in_cached": 0,
1135      "cost_per_1m_out_cached": 0,
1136      "context_window": 262144,
1137      "default_max_tokens": 26214,
1138      "can_reason": false,
1139      "supports_attachments": false,
1140      "options": {}
1141    },
1142    {
1143      "id": "mistralai/devstral-medium",
1144      "name": "Mistral: Devstral Medium",
1145      "cost_per_1m_in": 0.4,
1146      "cost_per_1m_out": 2,
1147      "cost_per_1m_in_cached": 0,
1148      "cost_per_1m_out_cached": 0,
1149      "context_window": 131072,
1150      "default_max_tokens": 13107,
1151      "can_reason": false,
1152      "supports_attachments": false,
1153      "options": {}
1154    },
1155    {
1156      "id": "mistralai/devstral-small",
1157      "name": "Mistral: Devstral Small 1.1",
1158      "cost_per_1m_in": 0.1,
1159      "cost_per_1m_out": 0.3,
1160      "cost_per_1m_in_cached": 0,
1161      "cost_per_1m_out_cached": 0,
1162      "context_window": 131072,
1163      "default_max_tokens": 13107,
1164      "can_reason": false,
1165      "supports_attachments": false,
1166      "options": {}
1167    },
1168    {
1169      "id": "mistralai/ministral-14b-2512",
1170      "name": "Mistral: Ministral 3 14B 2512",
1171      "cost_per_1m_in": 0.2,
1172      "cost_per_1m_out": 0.2,
1173      "cost_per_1m_in_cached": 0,
1174      "cost_per_1m_out_cached": 0,
1175      "context_window": 262144,
1176      "default_max_tokens": 26214,
1177      "can_reason": false,
1178      "supports_attachments": true,
1179      "options": {}
1180    },
1181    {
1182      "id": "mistralai/ministral-3b-2512",
1183      "name": "Mistral: Ministral 3 3B 2512",
1184      "cost_per_1m_in": 0.1,
1185      "cost_per_1m_out": 0.1,
1186      "cost_per_1m_in_cached": 0,
1187      "cost_per_1m_out_cached": 0,
1188      "context_window": 131072,
1189      "default_max_tokens": 13107,
1190      "can_reason": false,
1191      "supports_attachments": true,
1192      "options": {}
1193    },
1194    {
1195      "id": "mistralai/ministral-8b-2512",
1196      "name": "Mistral: Ministral 3 8B 2512",
1197      "cost_per_1m_in": 0.15,
1198      "cost_per_1m_out": 0.15,
1199      "cost_per_1m_in_cached": 0,
1200      "cost_per_1m_out_cached": 0,
1201      "context_window": 262144,
1202      "default_max_tokens": 26214,
1203      "can_reason": false,
1204      "supports_attachments": true,
1205      "options": {}
1206    },
1207    {
1208      "id": "mistralai/ministral-3b",
1209      "name": "Mistral: Ministral 3B",
1210      "cost_per_1m_in": 0.04,
1211      "cost_per_1m_out": 0.04,
1212      "cost_per_1m_in_cached": 0,
1213      "cost_per_1m_out_cached": 0,
1214      "context_window": 131072,
1215      "default_max_tokens": 13107,
1216      "can_reason": false,
1217      "supports_attachments": false,
1218      "options": {}
1219    },
1220    {
1221      "id": "mistralai/ministral-8b",
1222      "name": "Mistral: Ministral 8B",
1223      "cost_per_1m_in": 0.1,
1224      "cost_per_1m_out": 0.1,
1225      "cost_per_1m_in_cached": 0,
1226      "cost_per_1m_out_cached": 0,
1227      "context_window": 131072,
1228      "default_max_tokens": 13107,
1229      "can_reason": false,
1230      "supports_attachments": false,
1231      "options": {}
1232    },
1233    {
1234      "id": "mistralai/mistral-large-2512",
1235      "name": "Mistral: Mistral Large 3 2512",
1236      "cost_per_1m_in": 0.5,
1237      "cost_per_1m_out": 1.5,
1238      "cost_per_1m_in_cached": 0,
1239      "cost_per_1m_out_cached": 0,
1240      "context_window": 262144,
1241      "default_max_tokens": 26214,
1242      "can_reason": false,
1243      "supports_attachments": true,
1244      "options": {}
1245    },
1246    {
1247      "id": "mistralai/mistral-medium-3",
1248      "name": "Mistral: Mistral Medium 3",
1249      "cost_per_1m_in": 0.4,
1250      "cost_per_1m_out": 2,
1251      "cost_per_1m_in_cached": 0,
1252      "cost_per_1m_out_cached": 0,
1253      "context_window": 131072,
1254      "default_max_tokens": 13107,
1255      "can_reason": false,
1256      "supports_attachments": true,
1257      "options": {}
1258    },
1259    {
1260      "id": "mistralai/mistral-medium-3.1",
1261      "name": "Mistral: Mistral Medium 3.1",
1262      "cost_per_1m_in": 0.4,
1263      "cost_per_1m_out": 2,
1264      "cost_per_1m_in_cached": 0,
1265      "cost_per_1m_out_cached": 0,
1266      "context_window": 131072,
1267      "default_max_tokens": 13107,
1268      "can_reason": false,
1269      "supports_attachments": true,
1270      "options": {}
1271    },
1272    {
1273      "id": "mistralai/mistral-nemo",
1274      "name": "Mistral: Mistral Nemo",
1275      "cost_per_1m_in": 0.15,
1276      "cost_per_1m_out": 0.15,
1277      "cost_per_1m_in_cached": 0,
1278      "cost_per_1m_out_cached": 0,
1279      "context_window": 131072,
1280      "default_max_tokens": 13107,
1281      "can_reason": false,
1282      "supports_attachments": false,
1283      "options": {}
1284    },
1285    {
1286      "id": "mistralai/mistral-small-24b-instruct-2501",
1287      "name": "Mistral: Mistral Small 3",
1288      "cost_per_1m_in": 0.1,
1289      "cost_per_1m_out": 0.3,
1290      "cost_per_1m_in_cached": 0,
1291      "cost_per_1m_out_cached": 0,
1292      "context_window": 32768,
1293      "default_max_tokens": 1024,
1294      "can_reason": false,
1295      "supports_attachments": false,
1296      "options": {}
1297    },
1298    {
1299      "id": "mistralai/mistral-small-3.1-24b-instruct",
1300      "name": "Mistral: Mistral Small 3.1 24B",
1301      "cost_per_1m_in": 0.03,
1302      "cost_per_1m_out": 0.11,
1303      "cost_per_1m_in_cached": 0,
1304      "cost_per_1m_out_cached": 0,
1305      "context_window": 131072,
1306      "default_max_tokens": 65536,
1307      "can_reason": false,
1308      "supports_attachments": true,
1309      "options": {}
1310    },
1311    {
1312      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
1313      "name": "Mistral: Mistral Small 3.1 24B (free)",
1314      "cost_per_1m_in": 0,
1315      "cost_per_1m_out": 0,
1316      "cost_per_1m_in_cached": 0,
1317      "cost_per_1m_out_cached": 0,
1318      "context_window": 128000,
1319      "default_max_tokens": 12800,
1320      "can_reason": false,
1321      "supports_attachments": true,
1322      "options": {}
1323    },
1324    {
1325      "id": "mistralai/mistral-small-3.2-24b-instruct",
1326      "name": "Mistral: Mistral Small 3.2 24B",
1327      "cost_per_1m_in": 0.06,
1328      "cost_per_1m_out": 0.18,
1329      "cost_per_1m_in_cached": 0,
1330      "cost_per_1m_out_cached": 0,
1331      "context_window": 131072,
1332      "default_max_tokens": 65536,
1333      "can_reason": false,
1334      "supports_attachments": true,
1335      "options": {}
1336    },
1337    {
1338      "id": "mistralai/mistral-small-creative",
1339      "name": "Mistral: Mistral Small Creative",
1340      "cost_per_1m_in": 0.1,
1341      "cost_per_1m_out": 0.3,
1342      "cost_per_1m_in_cached": 0,
1343      "cost_per_1m_out_cached": 0,
1344      "context_window": 32768,
1345      "default_max_tokens": 3276,
1346      "can_reason": false,
1347      "supports_attachments": false,
1348      "options": {}
1349    },
1350    {
1351      "id": "mistralai/mixtral-8x22b-instruct",
1352      "name": "Mistral: Mixtral 8x22B Instruct",
1353      "cost_per_1m_in": 2,
1354      "cost_per_1m_out": 6,
1355      "cost_per_1m_in_cached": 0,
1356      "cost_per_1m_out_cached": 0,
1357      "context_window": 65536,
1358      "default_max_tokens": 6553,
1359      "can_reason": false,
1360      "supports_attachments": false,
1361      "options": {}
1362    },
1363    {
1364      "id": "mistralai/mixtral-8x7b-instruct",
1365      "name": "Mistral: Mixtral 8x7B Instruct",
1366      "cost_per_1m_in": 0.54,
1367      "cost_per_1m_out": 0.54,
1368      "cost_per_1m_in_cached": 0,
1369      "cost_per_1m_out_cached": 0,
1370      "context_window": 32768,
1371      "default_max_tokens": 8192,
1372      "can_reason": false,
1373      "supports_attachments": false,
1374      "options": {}
1375    },
1376    {
1377      "id": "mistralai/pixtral-12b",
1378      "name": "Mistral: Pixtral 12B",
1379      "cost_per_1m_in": 0.15,
1380      "cost_per_1m_out": 0.15,
1381      "cost_per_1m_in_cached": 0,
1382      "cost_per_1m_out_cached": 0,
1383      "context_window": 131072,
1384      "default_max_tokens": 13107,
1385      "can_reason": false,
1386      "supports_attachments": true,
1387      "options": {}
1388    },
1389    {
1390      "id": "mistralai/pixtral-large-2411",
1391      "name": "Mistral: Pixtral Large 2411",
1392      "cost_per_1m_in": 2,
1393      "cost_per_1m_out": 6,
1394      "cost_per_1m_in_cached": 0,
1395      "cost_per_1m_out_cached": 0,
1396      "context_window": 131072,
1397      "default_max_tokens": 13107,
1398      "can_reason": false,
1399      "supports_attachments": true,
1400      "options": {}
1401    },
1402    {
1403      "id": "mistralai/mistral-saba",
1404      "name": "Mistral: Saba",
1405      "cost_per_1m_in": 0.2,
1406      "cost_per_1m_out": 0.6,
1407      "cost_per_1m_in_cached": 0,
1408      "cost_per_1m_out_cached": 0,
1409      "context_window": 32768,
1410      "default_max_tokens": 3276,
1411      "can_reason": false,
1412      "supports_attachments": false,
1413      "options": {}
1414    },
1415    {
1416      "id": "mistralai/voxtral-small-24b-2507",
1417      "name": "Mistral: Voxtral Small 24B 2507",
1418      "cost_per_1m_in": 0.1,
1419      "cost_per_1m_out": 0.3,
1420      "cost_per_1m_in_cached": 0,
1421      "cost_per_1m_out_cached": 0,
1422      "context_window": 32000,
1423      "default_max_tokens": 3200,
1424      "can_reason": false,
1425      "supports_attachments": false,
1426      "options": {}
1427    },
1428    {
1429      "id": "moonshotai/kimi-k2",
1430      "name": "MoonshotAI: Kimi K2 0711",
1431      "cost_per_1m_in": 0.5,
1432      "cost_per_1m_out": 2.4,
1433      "cost_per_1m_in_cached": 0,
1434      "cost_per_1m_out_cached": 0,
1435      "context_window": 131072,
1436      "default_max_tokens": 13107,
1437      "can_reason": false,
1438      "supports_attachments": false,
1439      "options": {}
1440    },
1441    {
1442      "id": "moonshotai/kimi-k2-0905",
1443      "name": "MoonshotAI: Kimi K2 0905",
1444      "cost_per_1m_in": 0.59,
1445      "cost_per_1m_out": 2.99,
1446      "cost_per_1m_in_cached": 0,
1447      "cost_per_1m_out_cached": 0,
1448      "context_window": 262144,
1449      "default_max_tokens": 131072,
1450      "can_reason": false,
1451      "supports_attachments": false,
1452      "options": {}
1453    },
1454    {
1455      "id": "moonshotai/kimi-k2-0905:exacto",
1456      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
1457      "cost_per_1m_in": 1,
1458      "cost_per_1m_out": 3,
1459      "cost_per_1m_in_cached": 0,
1460      "cost_per_1m_out_cached": 0,
1461      "context_window": 262144,
1462      "default_max_tokens": 8192,
1463      "can_reason": false,
1464      "supports_attachments": false,
1465      "options": {}
1466    },
1467    {
1468      "id": "moonshotai/kimi-k2-thinking",
1469      "name": "MoonshotAI: Kimi K2 Thinking",
1470      "cost_per_1m_in": 0.6,
1471      "cost_per_1m_out": 2.5,
1472      "cost_per_1m_in_cached": 0,
1473      "cost_per_1m_out_cached": 0,
1474      "context_window": 262144,
1475      "default_max_tokens": 131072,
1476      "can_reason": true,
1477      "reasoning_levels": [
1478        "low",
1479        "medium",
1480        "high"
1481      ],
1482      "default_reasoning_effort": "medium",
1483      "supports_attachments": false,
1484      "options": {}
1485    },
1486    {
1487      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1488      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1489      "cost_per_1m_in": 1.2,
1490      "cost_per_1m_out": 1.2,
1491      "cost_per_1m_in_cached": 0,
1492      "cost_per_1m_out_cached": 0,
1493      "context_window": 131072,
1494      "default_max_tokens": 8192,
1495      "can_reason": false,
1496      "supports_attachments": false,
1497      "options": {}
1498    },
1499    {
1500      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1501      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1502      "cost_per_1m_in": 0.1,
1503      "cost_per_1m_out": 0.4,
1504      "cost_per_1m_in_cached": 0,
1505      "cost_per_1m_out_cached": 0,
1506      "context_window": 131072,
1507      "default_max_tokens": 13107,
1508      "can_reason": true,
1509      "reasoning_levels": [
1510        "low",
1511        "medium",
1512        "high"
1513      ],
1514      "default_reasoning_effort": "medium",
1515      "supports_attachments": false,
1516      "options": {}
1517    },
1518    {
1519      "id": "nvidia/nemotron-3-nano-30b-a3b",
1520      "name": "NVIDIA: Nemotron 3 Nano 30B A3B",
1521      "cost_per_1m_in": 0.06,
1522      "cost_per_1m_out": 0.24,
1523      "cost_per_1m_in_cached": 0,
1524      "cost_per_1m_out_cached": 0,
1525      "context_window": 262144,
1526      "default_max_tokens": 26214,
1527      "can_reason": true,
1528      "reasoning_levels": [
1529        "low",
1530        "medium",
1531        "high"
1532      ],
1533      "default_reasoning_effort": "medium",
1534      "supports_attachments": false,
1535      "options": {}
1536    },
1537    {
1538      "id": "nvidia/nemotron-3-nano-30b-a3b:free",
1539      "name": "NVIDIA: Nemotron 3 Nano 30B A3B (free)",
1540      "cost_per_1m_in": 0,
1541      "cost_per_1m_out": 0,
1542      "cost_per_1m_in_cached": 0,
1543      "cost_per_1m_out_cached": 0,
1544      "context_window": 256000,
1545      "default_max_tokens": 25600,
1546      "can_reason": true,
1547      "reasoning_levels": [
1548        "low",
1549        "medium",
1550        "high"
1551      ],
1552      "default_reasoning_effort": "medium",
1553      "supports_attachments": false,
1554      "options": {}
1555    },
1556    {
1557      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1558      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1559      "cost_per_1m_in": 0,
1560      "cost_per_1m_out": 0,
1561      "cost_per_1m_in_cached": 0,
1562      "cost_per_1m_out_cached": 0,
1563      "context_window": 128000,
1564      "default_max_tokens": 64000,
1565      "can_reason": true,
1566      "reasoning_levels": [
1567        "low",
1568        "medium",
1569        "high"
1570      ],
1571      "default_reasoning_effort": "medium",
1572      "supports_attachments": true,
1573      "options": {}
1574    },
1575    {
1576      "id": "nvidia/nemotron-nano-9b-v2",
1577      "name": "NVIDIA: Nemotron Nano 9B V2",
1578      "cost_per_1m_in": 0.04,
1579      "cost_per_1m_out": 0.16,
1580      "cost_per_1m_in_cached": 0,
1581      "cost_per_1m_out_cached": 0,
1582      "context_window": 131072,
1583      "default_max_tokens": 13107,
1584      "can_reason": true,
1585      "reasoning_levels": [
1586        "low",
1587        "medium",
1588        "high"
1589      ],
1590      "default_reasoning_effort": "medium",
1591      "supports_attachments": false,
1592      "options": {}
1593    },
1594    {
1595      "id": "nvidia/nemotron-nano-9b-v2:free",
1596      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1597      "cost_per_1m_in": 0,
1598      "cost_per_1m_out": 0,
1599      "cost_per_1m_in_cached": 0,
1600      "cost_per_1m_out_cached": 0,
1601      "context_window": 128000,
1602      "default_max_tokens": 12800,
1603      "can_reason": true,
1604      "reasoning_levels": [
1605        "low",
1606        "medium",
1607        "high"
1608      ],
1609      "default_reasoning_effort": "medium",
1610      "supports_attachments": false,
1611      "options": {}
1612    },
1613    {
1614      "id": "nex-agi/deepseek-v3.1-nex-n1",
1615      "name": "Nex AGI: DeepSeek V3.1 Nex N1",
1616      "cost_per_1m_in": 0.27,
1617      "cost_per_1m_out": 1,
1618      "cost_per_1m_in_cached": 0,
1619      "cost_per_1m_out_cached": 0,
1620      "context_window": 131072,
1621      "default_max_tokens": 81920,
1622      "can_reason": false,
1623      "supports_attachments": false,
1624      "options": {}
1625    },
1626    {
1627      "id": "nousresearch/deephermes-3-mistral-24b-preview",
1628      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
1629      "cost_per_1m_in": 0.02,
1630      "cost_per_1m_out": 0.1,
1631      "cost_per_1m_in_cached": 0,
1632      "cost_per_1m_out_cached": 0,
1633      "context_window": 32768,
1634      "default_max_tokens": 16384,
1635      "can_reason": true,
1636      "reasoning_levels": [
1637        "low",
1638        "medium",
1639        "high"
1640      ],
1641      "default_reasoning_effort": "medium",
1642      "supports_attachments": false,
1643      "options": {}
1644    },
1645    {
1646      "id": "openai/gpt-4-turbo",
1647      "name": "OpenAI: GPT-4 Turbo",
1648      "cost_per_1m_in": 10,
1649      "cost_per_1m_out": 30,
1650      "cost_per_1m_in_cached": 0,
1651      "cost_per_1m_out_cached": 0,
1652      "context_window": 128000,
1653      "default_max_tokens": 2048,
1654      "can_reason": false,
1655      "supports_attachments": true,
1656      "options": {}
1657    },
1658    {
1659      "id": "openai/gpt-4-1106-preview",
1660      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1661      "cost_per_1m_in": 10,
1662      "cost_per_1m_out": 30,
1663      "cost_per_1m_in_cached": 0,
1664      "cost_per_1m_out_cached": 0,
1665      "context_window": 128000,
1666      "default_max_tokens": 2048,
1667      "can_reason": false,
1668      "supports_attachments": false,
1669      "options": {}
1670    },
1671    {
1672      "id": "openai/gpt-4-turbo-preview",
1673      "name": "OpenAI: GPT-4 Turbo Preview",
1674      "cost_per_1m_in": 10,
1675      "cost_per_1m_out": 30,
1676      "cost_per_1m_in_cached": 0,
1677      "cost_per_1m_out_cached": 0,
1678      "context_window": 128000,
1679      "default_max_tokens": 2048,
1680      "can_reason": false,
1681      "supports_attachments": false,
1682      "options": {}
1683    },
1684    {
1685      "id": "openai/gpt-4.1",
1686      "name": "OpenAI: GPT-4.1",
1687      "cost_per_1m_in": 2,
1688      "cost_per_1m_out": 8,
1689      "cost_per_1m_in_cached": 0,
1690      "cost_per_1m_out_cached": 0.5,
1691      "context_window": 1047576,
1692      "default_max_tokens": 104757,
1693      "can_reason": false,
1694      "supports_attachments": true,
1695      "options": {}
1696    },
1697    {
1698      "id": "openai/gpt-4.1-mini",
1699      "name": "OpenAI: GPT-4.1 Mini",
1700      "cost_per_1m_in": 0.4,
1701      "cost_per_1m_out": 1.6,
1702      "cost_per_1m_in_cached": 0,
1703      "cost_per_1m_out_cached": 0.1,
1704      "context_window": 1047576,
1705      "default_max_tokens": 104757,
1706      "can_reason": false,
1707      "supports_attachments": true,
1708      "options": {}
1709    },
1710    {
1711      "id": "openai/gpt-4.1-nano",
1712      "name": "OpenAI: GPT-4.1 Nano",
1713      "cost_per_1m_in": 0.1,
1714      "cost_per_1m_out": 0.4,
1715      "cost_per_1m_in_cached": 0,
1716      "cost_per_1m_out_cached": 0.03,
1717      "context_window": 1047576,
1718      "default_max_tokens": 104757,
1719      "can_reason": false,
1720      "supports_attachments": true,
1721      "options": {}
1722    },
1723    {
1724      "id": "openai/gpt-4o",
1725      "name": "OpenAI: GPT-4o",
1726      "cost_per_1m_in": 2.5,
1727      "cost_per_1m_out": 10,
1728      "cost_per_1m_in_cached": 0,
1729      "cost_per_1m_out_cached": 0,
1730      "context_window": 128000,
1731      "default_max_tokens": 8192,
1732      "can_reason": false,
1733      "supports_attachments": true,
1734      "options": {}
1735    },
1736    {
1737      "id": "openai/gpt-4o-2024-05-13",
1738      "name": "OpenAI: GPT-4o (2024-05-13)",
1739      "cost_per_1m_in": 5,
1740      "cost_per_1m_out": 15,
1741      "cost_per_1m_in_cached": 0,
1742      "cost_per_1m_out_cached": 0,
1743      "context_window": 128000,
1744      "default_max_tokens": 2048,
1745      "can_reason": false,
1746      "supports_attachments": true,
1747      "options": {}
1748    },
1749    {
1750      "id": "openai/gpt-4o-2024-08-06",
1751      "name": "OpenAI: GPT-4o (2024-08-06)",
1752      "cost_per_1m_in": 2.5,
1753      "cost_per_1m_out": 10,
1754      "cost_per_1m_in_cached": 0,
1755      "cost_per_1m_out_cached": 1.25,
1756      "context_window": 128000,
1757      "default_max_tokens": 8192,
1758      "can_reason": false,
1759      "supports_attachments": true,
1760      "options": {}
1761    },
1762    {
1763      "id": "openai/gpt-4o-2024-11-20",
1764      "name": "OpenAI: GPT-4o (2024-11-20)",
1765      "cost_per_1m_in": 2.5,
1766      "cost_per_1m_out": 10,
1767      "cost_per_1m_in_cached": 0,
1768      "cost_per_1m_out_cached": 1.25,
1769      "context_window": 128000,
1770      "default_max_tokens": 8192,
1771      "can_reason": false,
1772      "supports_attachments": true,
1773      "options": {}
1774    },
1775    {
1776      "id": "openai/gpt-4o:extended",
1777      "name": "OpenAI: GPT-4o (extended)",
1778      "cost_per_1m_in": 6,
1779      "cost_per_1m_out": 18,
1780      "cost_per_1m_in_cached": 0,
1781      "cost_per_1m_out_cached": 0,
1782      "context_window": 128000,
1783      "default_max_tokens": 32000,
1784      "can_reason": false,
1785      "supports_attachments": true,
1786      "options": {}
1787    },
1788    {
1789      "id": "openai/gpt-4o-audio-preview",
1790      "name": "OpenAI: GPT-4o Audio",
1791      "cost_per_1m_in": 2.5,
1792      "cost_per_1m_out": 10,
1793      "cost_per_1m_in_cached": 0,
1794      "cost_per_1m_out_cached": 0,
1795      "context_window": 128000,
1796      "default_max_tokens": 8192,
1797      "can_reason": false,
1798      "supports_attachments": false,
1799      "options": {}
1800    },
1801    {
1802      "id": "openai/gpt-4o-mini",
1803      "name": "OpenAI: GPT-4o-mini",
1804      "cost_per_1m_in": 0.15,
1805      "cost_per_1m_out": 0.6,
1806      "cost_per_1m_in_cached": 0,
1807      "cost_per_1m_out_cached": 0.075,
1808      "context_window": 128000,
1809      "default_max_tokens": 8192,
1810      "can_reason": false,
1811      "supports_attachments": true,
1812      "options": {}
1813    },
1814    {
1815      "id": "openai/gpt-4o-mini-2024-07-18",
1816      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1817      "cost_per_1m_in": 0.15,
1818      "cost_per_1m_out": 0.6,
1819      "cost_per_1m_in_cached": 0,
1820      "cost_per_1m_out_cached": 0.075,
1821      "context_window": 128000,
1822      "default_max_tokens": 8192,
1823      "can_reason": false,
1824      "supports_attachments": true,
1825      "options": {}
1826    },
1827    {
1828      "id": "openai/gpt-5",
1829      "name": "OpenAI: GPT-5",
1830      "cost_per_1m_in": 1.25,
1831      "cost_per_1m_out": 10,
1832      "cost_per_1m_in_cached": 0,
1833      "cost_per_1m_out_cached": 0.125,
1834      "context_window": 400000,
1835      "default_max_tokens": 64000,
1836      "can_reason": true,
1837      "reasoning_levels": [
1838        "low",
1839        "medium",
1840        "high"
1841      ],
1842      "default_reasoning_effort": "medium",
1843      "supports_attachments": true,
1844      "options": {}
1845    },
1846    {
1847      "id": "openai/gpt-5-codex",
1848      "name": "OpenAI: GPT-5 Codex",
1849      "cost_per_1m_in": 1.25,
1850      "cost_per_1m_out": 10,
1851      "cost_per_1m_in_cached": 0,
1852      "cost_per_1m_out_cached": 0.125,
1853      "context_window": 400000,
1854      "default_max_tokens": 64000,
1855      "can_reason": true,
1856      "reasoning_levels": [
1857        "low",
1858        "medium",
1859        "high"
1860      ],
1861      "default_reasoning_effort": "medium",
1862      "supports_attachments": true,
1863      "options": {}
1864    },
1865    {
1866      "id": "openai/gpt-5-image",
1867      "name": "OpenAI: GPT-5 Image",
1868      "cost_per_1m_in": 10,
1869      "cost_per_1m_out": 10,
1870      "cost_per_1m_in_cached": 0,
1871      "cost_per_1m_out_cached": 1.25,
1872      "context_window": 400000,
1873      "default_max_tokens": 64000,
1874      "can_reason": true,
1875      "reasoning_levels": [
1876        "low",
1877        "medium",
1878        "high"
1879      ],
1880      "default_reasoning_effort": "medium",
1881      "supports_attachments": true,
1882      "options": {}
1883    },
1884    {
1885      "id": "openai/gpt-5-image-mini",
1886      "name": "OpenAI: GPT-5 Image Mini",
1887      "cost_per_1m_in": 2.5,
1888      "cost_per_1m_out": 2,
1889      "cost_per_1m_in_cached": 0,
1890      "cost_per_1m_out_cached": 0.25,
1891      "context_window": 400000,
1892      "default_max_tokens": 64000,
1893      "can_reason": true,
1894      "reasoning_levels": [
1895        "low",
1896        "medium",
1897        "high"
1898      ],
1899      "default_reasoning_effort": "medium",
1900      "supports_attachments": true,
1901      "options": {}
1902    },
1903    {
1904      "id": "openai/gpt-5-mini",
1905      "name": "OpenAI: GPT-5 Mini",
1906      "cost_per_1m_in": 0.25,
1907      "cost_per_1m_out": 2,
1908      "cost_per_1m_in_cached": 0,
1909      "cost_per_1m_out_cached": 0.025,
1910      "context_window": 400000,
1911      "default_max_tokens": 64000,
1912      "can_reason": true,
1913      "reasoning_levels": [
1914        "low",
1915        "medium",
1916        "high"
1917      ],
1918      "default_reasoning_effort": "medium",
1919      "supports_attachments": true,
1920      "options": {}
1921    },
1922    {
1923      "id": "openai/gpt-5-nano",
1924      "name": "OpenAI: GPT-5 Nano",
1925      "cost_per_1m_in": 0.05,
1926      "cost_per_1m_out": 0.4,
1927      "cost_per_1m_in_cached": 0,
1928      "cost_per_1m_out_cached": 0.01,
1929      "context_window": 400000,
1930      "default_max_tokens": 40000,
1931      "can_reason": true,
1932      "reasoning_levels": [
1933        "low",
1934        "medium",
1935        "high"
1936      ],
1937      "default_reasoning_effort": "medium",
1938      "supports_attachments": true,
1939      "options": {}
1940    },
1941    {
1942      "id": "openai/gpt-5-pro",
1943      "name": "OpenAI: GPT-5 Pro",
1944      "cost_per_1m_in": 15,
1945      "cost_per_1m_out": 120,
1946      "cost_per_1m_in_cached": 0,
1947      "cost_per_1m_out_cached": 0,
1948      "context_window": 400000,
1949      "default_max_tokens": 64000,
1950      "can_reason": true,
1951      "reasoning_levels": [
1952        "low",
1953        "medium",
1954        "high"
1955      ],
1956      "default_reasoning_effort": "medium",
1957      "supports_attachments": true,
1958      "options": {}
1959    },
1960    {
1961      "id": "openai/gpt-5.1",
1962      "name": "OpenAI: GPT-5.1",
1963      "cost_per_1m_in": 1.25,
1964      "cost_per_1m_out": 10,
1965      "cost_per_1m_in_cached": 0,
1966      "cost_per_1m_out_cached": 0.125,
1967      "context_window": 400000,
1968      "default_max_tokens": 64000,
1969      "can_reason": true,
1970      "reasoning_levels": [
1971        "low",
1972        "medium",
1973        "high"
1974      ],
1975      "default_reasoning_effort": "medium",
1976      "supports_attachments": true,
1977      "options": {}
1978    },
1979    {
1980      "id": "openai/gpt-5.1-chat",
1981      "name": "OpenAI: GPT-5.1 Chat",
1982      "cost_per_1m_in": 1.25,
1983      "cost_per_1m_out": 10,
1984      "cost_per_1m_in_cached": 0,
1985      "cost_per_1m_out_cached": 0.125,
1986      "context_window": 128000,
1987      "default_max_tokens": 8192,
1988      "can_reason": false,
1989      "supports_attachments": true,
1990      "options": {}
1991    },
1992    {
1993      "id": "openai/gpt-5.1-codex",
1994      "name": "OpenAI: GPT-5.1-Codex",
1995      "cost_per_1m_in": 1.25,
1996      "cost_per_1m_out": 10,
1997      "cost_per_1m_in_cached": 0,
1998      "cost_per_1m_out_cached": 0.125,
1999      "context_window": 400000,
2000      "default_max_tokens": 64000,
2001      "can_reason": true,
2002      "reasoning_levels": [
2003        "low",
2004        "medium",
2005        "high"
2006      ],
2007      "default_reasoning_effort": "medium",
2008      "supports_attachments": true,
2009      "options": {}
2010    },
2011    {
2012      "id": "openai/gpt-5.1-codex-max",
2013      "name": "OpenAI: GPT-5.1-Codex-Max",
2014      "cost_per_1m_in": 1.25,
2015      "cost_per_1m_out": 10,
2016      "cost_per_1m_in_cached": 0,
2017      "cost_per_1m_out_cached": 0.125,
2018      "context_window": 400000,
2019      "default_max_tokens": 64000,
2020      "can_reason": true,
2021      "reasoning_levels": [
2022        "low",
2023        "medium",
2024        "high"
2025      ],
2026      "default_reasoning_effort": "medium",
2027      "supports_attachments": true,
2028      "options": {}
2029    },
2030    {
2031      "id": "openai/gpt-5.1-codex-mini",
2032      "name": "OpenAI: GPT-5.1-Codex-Mini",
2033      "cost_per_1m_in": 0.25,
2034      "cost_per_1m_out": 2,
2035      "cost_per_1m_in_cached": 0,
2036      "cost_per_1m_out_cached": 0.025,
2037      "context_window": 400000,
2038      "default_max_tokens": 50000,
2039      "can_reason": true,
2040      "reasoning_levels": [
2041        "low",
2042        "medium",
2043        "high"
2044      ],
2045      "default_reasoning_effort": "medium",
2046      "supports_attachments": true,
2047      "options": {}
2048    },
2049    {
2050      "id": "openai/gpt-5.2",
2051      "name": "OpenAI: GPT-5.2",
2052      "cost_per_1m_in": 1.75,
2053      "cost_per_1m_out": 14,
2054      "cost_per_1m_in_cached": 0,
2055      "cost_per_1m_out_cached": 0.175,
2056      "context_window": 400000,
2057      "default_max_tokens": 64000,
2058      "can_reason": true,
2059      "reasoning_levels": [
2060        "low",
2061        "medium",
2062        "high"
2063      ],
2064      "default_reasoning_effort": "medium",
2065      "supports_attachments": true,
2066      "options": {}
2067    },
2068    {
2069      "id": "openai/gpt-5.2-chat",
2070      "name": "OpenAI: GPT-5.2 Chat",
2071      "cost_per_1m_in": 1.75,
2072      "cost_per_1m_out": 14,
2073      "cost_per_1m_in_cached": 0,
2074      "cost_per_1m_out_cached": 0.175,
2075      "context_window": 128000,
2076      "default_max_tokens": 16000,
2077      "can_reason": false,
2078      "supports_attachments": true,
2079      "options": {}
2080    },
2081    {
2082      "id": "openai/gpt-5.2-pro",
2083      "name": "OpenAI: GPT-5.2 Pro",
2084      "cost_per_1m_in": 21,
2085      "cost_per_1m_out": 168,
2086      "cost_per_1m_in_cached": 0,
2087      "cost_per_1m_out_cached": 0,
2088      "context_window": 400000,
2089      "default_max_tokens": 64000,
2090      "can_reason": true,
2091      "reasoning_levels": [
2092        "low",
2093        "medium",
2094        "high"
2095      ],
2096      "default_reasoning_effort": "medium",
2097      "supports_attachments": true,
2098      "options": {}
2099    },
2100    {
2101      "id": "openai/gpt-5.2-codex",
2102      "name": "OpenAI: GPT-5.2-Codex",
2103      "cost_per_1m_in": 1.75,
2104      "cost_per_1m_out": 14,
2105      "cost_per_1m_in_cached": 0,
2106      "cost_per_1m_out_cached": 0.175,
2107      "context_window": 400000,
2108      "default_max_tokens": 64000,
2109      "can_reason": true,
2110      "reasoning_levels": [
2111        "low",
2112        "medium",
2113        "high"
2114      ],
2115      "default_reasoning_effort": "medium",
2116      "supports_attachments": true,
2117      "options": {}
2118    },
2119    {
2120      "id": "openai/gpt-oss-120b",
2121      "name": "OpenAI: gpt-oss-120b",
2122      "cost_per_1m_in": 0.1,
2123      "cost_per_1m_out": 0.49,
2124      "cost_per_1m_in_cached": 0,
2125      "cost_per_1m_out_cached": 0,
2126      "context_window": 131072,
2127      "default_max_tokens": 13107,
2128      "can_reason": true,
2129      "reasoning_levels": [
2130        "low",
2131        "medium",
2132        "high"
2133      ],
2134      "default_reasoning_effort": "medium",
2135      "supports_attachments": false,
2136      "options": {}
2137    },
2138    {
2139      "id": "openai/gpt-oss-120b:exacto",
2140      "name": "OpenAI: gpt-oss-120b (exacto)",
2141      "cost_per_1m_in": 0.15,
2142      "cost_per_1m_out": 0.6,
2143      "cost_per_1m_in_cached": 0,
2144      "cost_per_1m_out_cached": 0,
2145      "context_window": 131072,
2146      "default_max_tokens": 32768,
2147      "can_reason": true,
2148      "reasoning_levels": [
2149        "low",
2150        "medium",
2151        "high"
2152      ],
2153      "default_reasoning_effort": "medium",
2154      "supports_attachments": false,
2155      "options": {}
2156    },
2157    {
2158      "id": "openai/gpt-oss-120b:free",
2159      "name": "OpenAI: gpt-oss-120b (free)",
2160      "cost_per_1m_in": 0,
2161      "cost_per_1m_out": 0,
2162      "cost_per_1m_in_cached": 0,
2163      "cost_per_1m_out_cached": 0,
2164      "context_window": 131072,
2165      "default_max_tokens": 13107,
2166      "can_reason": true,
2167      "reasoning_levels": [
2168        "low",
2169        "medium",
2170        "high"
2171      ],
2172      "default_reasoning_effort": "medium",
2173      "supports_attachments": false,
2174      "options": {}
2175    },
2176    {
2177      "id": "openai/gpt-oss-20b",
2178      "name": "OpenAI: gpt-oss-20b",
2179      "cost_per_1m_in": 0.02,
2180      "cost_per_1m_out": 0.1,
2181      "cost_per_1m_in_cached": 0,
2182      "cost_per_1m_out_cached": 0,
2183      "context_window": 131072,
2184      "default_max_tokens": 65536,
2185      "can_reason": true,
2186      "reasoning_levels": [
2187        "low",
2188        "medium",
2189        "high"
2190      ],
2191      "default_reasoning_effort": "medium",
2192      "supports_attachments": false,
2193      "options": {}
2194    },
2195    {
2196      "id": "openai/gpt-oss-20b:free",
2197      "name": "OpenAI: gpt-oss-20b (free)",
2198      "cost_per_1m_in": 0,
2199      "cost_per_1m_out": 0,
2200      "cost_per_1m_in_cached": 0,
2201      "cost_per_1m_out_cached": 0,
2202      "context_window": 131072,
2203      "default_max_tokens": 13107,
2204      "can_reason": true,
2205      "reasoning_levels": [
2206        "low",
2207        "medium",
2208        "high"
2209      ],
2210      "default_reasoning_effort": "medium",
2211      "supports_attachments": false,
2212      "options": {}
2213    },
2214    {
2215      "id": "openai/gpt-oss-safeguard-20b",
2216      "name": "OpenAI: gpt-oss-safeguard-20b",
2217      "cost_per_1m_in": 0.075,
2218      "cost_per_1m_out": 0.3,
2219      "cost_per_1m_in_cached": 0,
2220      "cost_per_1m_out_cached": 0.037,
2221      "context_window": 131072,
2222      "default_max_tokens": 32768,
2223      "can_reason": true,
2224      "reasoning_levels": [
2225        "low",
2226        "medium",
2227        "high"
2228      ],
2229      "default_reasoning_effort": "medium",
2230      "supports_attachments": false,
2231      "options": {}
2232    },
2233    {
2234      "id": "openai/o1",
2235      "name": "OpenAI: o1",
2236      "cost_per_1m_in": 15,
2237      "cost_per_1m_out": 60,
2238      "cost_per_1m_in_cached": 0,
2239      "cost_per_1m_out_cached": 7.5,
2240      "context_window": 200000,
2241      "default_max_tokens": 50000,
2242      "can_reason": false,
2243      "supports_attachments": true,
2244      "options": {}
2245    },
2246    {
2247      "id": "openai/o3",
2248      "name": "OpenAI: o3",
2249      "cost_per_1m_in": 2,
2250      "cost_per_1m_out": 8,
2251      "cost_per_1m_in_cached": 0,
2252      "cost_per_1m_out_cached": 0.5,
2253      "context_window": 200000,
2254      "default_max_tokens": 50000,
2255      "can_reason": true,
2256      "reasoning_levels": [
2257        "low",
2258        "medium",
2259        "high"
2260      ],
2261      "default_reasoning_effort": "medium",
2262      "supports_attachments": true,
2263      "options": {}
2264    },
2265    {
2266      "id": "openai/o3-deep-research",
2267      "name": "OpenAI: o3 Deep Research",
2268      "cost_per_1m_in": 10,
2269      "cost_per_1m_out": 40,
2270      "cost_per_1m_in_cached": 0,
2271      "cost_per_1m_out_cached": 2.5,
2272      "context_window": 200000,
2273      "default_max_tokens": 50000,
2274      "can_reason": true,
2275      "reasoning_levels": [
2276        "low",
2277        "medium",
2278        "high"
2279      ],
2280      "default_reasoning_effort": "medium",
2281      "supports_attachments": true,
2282      "options": {}
2283    },
2284    {
2285      "id": "openai/o3-mini",
2286      "name": "OpenAI: o3 Mini",
2287      "cost_per_1m_in": 1.1,
2288      "cost_per_1m_out": 4.4,
2289      "cost_per_1m_in_cached": 0,
2290      "cost_per_1m_out_cached": 0.55,
2291      "context_window": 200000,
2292      "default_max_tokens": 50000,
2293      "can_reason": false,
2294      "supports_attachments": false,
2295      "options": {}
2296    },
2297    {
2298      "id": "openai/o3-mini-high",
2299      "name": "OpenAI: o3 Mini High",
2300      "cost_per_1m_in": 1.1,
2301      "cost_per_1m_out": 4.4,
2302      "cost_per_1m_in_cached": 0,
2303      "cost_per_1m_out_cached": 0.55,
2304      "context_window": 200000,
2305      "default_max_tokens": 50000,
2306      "can_reason": false,
2307      "supports_attachments": false,
2308      "options": {}
2309    },
2310    {
2311      "id": "openai/o3-pro",
2312      "name": "OpenAI: o3 Pro",
2313      "cost_per_1m_in": 20,
2314      "cost_per_1m_out": 80,
2315      "cost_per_1m_in_cached": 0,
2316      "cost_per_1m_out_cached": 0,
2317      "context_window": 200000,
2318      "default_max_tokens": 50000,
2319      "can_reason": true,
2320      "reasoning_levels": [
2321        "low",
2322        "medium",
2323        "high"
2324      ],
2325      "default_reasoning_effort": "medium",
2326      "supports_attachments": true,
2327      "options": {}
2328    },
2329    {
2330      "id": "openai/o4-mini",
2331      "name": "OpenAI: o4 Mini",
2332      "cost_per_1m_in": 1.1,
2333      "cost_per_1m_out": 4.4,
2334      "cost_per_1m_in_cached": 0,
2335      "cost_per_1m_out_cached": 0.275,
2336      "context_window": 200000,
2337      "default_max_tokens": 50000,
2338      "can_reason": true,
2339      "reasoning_levels": [
2340        "low",
2341        "medium",
2342        "high"
2343      ],
2344      "default_reasoning_effort": "medium",
2345      "supports_attachments": true,
2346      "options": {}
2347    },
2348    {
2349      "id": "openai/o4-mini-deep-research",
2350      "name": "OpenAI: o4 Mini Deep Research",
2351      "cost_per_1m_in": 2,
2352      "cost_per_1m_out": 8,
2353      "cost_per_1m_in_cached": 0,
2354      "cost_per_1m_out_cached": 0.5,
2355      "context_window": 200000,
2356      "default_max_tokens": 50000,
2357      "can_reason": true,
2358      "reasoning_levels": [
2359        "low",
2360        "medium",
2361        "high"
2362      ],
2363      "default_reasoning_effort": "medium",
2364      "supports_attachments": true,
2365      "options": {}
2366    },
2367    {
2368      "id": "openai/o4-mini-high",
2369      "name": "OpenAI: o4 Mini High",
2370      "cost_per_1m_in": 1.1,
2371      "cost_per_1m_out": 4.4,
2372      "cost_per_1m_in_cached": 0,
2373      "cost_per_1m_out_cached": 0.275,
2374      "context_window": 200000,
2375      "default_max_tokens": 50000,
2376      "can_reason": true,
2377      "reasoning_levels": [
2378        "low",
2379        "medium",
2380        "high"
2381      ],
2382      "default_reasoning_effort": "medium",
2383      "supports_attachments": true,
2384      "options": {}
2385    },
2386    {
2387      "id": "prime-intellect/intellect-3",
2388      "name": "Prime Intellect: INTELLECT-3",
2389      "cost_per_1m_in": 0.2,
2390      "cost_per_1m_out": 1.1,
2391      "cost_per_1m_in_cached": 0,
2392      "cost_per_1m_out_cached": 0,
2393      "context_window": 131072,
2394      "default_max_tokens": 65536,
2395      "can_reason": true,
2396      "reasoning_levels": [
2397        "low",
2398        "medium",
2399        "high"
2400      ],
2401      "default_reasoning_effort": "medium",
2402      "supports_attachments": false,
2403      "options": {}
2404    },
2405    {
2406      "id": "qwen/qwen-2.5-72b-instruct",
2407      "name": "Qwen2.5 72B Instruct",
2408      "cost_per_1m_in": 0.12,
2409      "cost_per_1m_out": 0.39,
2410      "cost_per_1m_in_cached": 0,
2411      "cost_per_1m_out_cached": 0,
2412      "context_window": 32768,
2413      "default_max_tokens": 8192,
2414      "can_reason": false,
2415      "supports_attachments": false,
2416      "options": {}
2417    },
2418    {
2419      "id": "qwen/qwen-2.5-7b-instruct",
2420      "name": "Qwen: Qwen2.5 7B Instruct",
2421      "cost_per_1m_in": 0.04,
2422      "cost_per_1m_out": 0.1,
2423      "cost_per_1m_in_cached": 0,
2424      "cost_per_1m_out_cached": 0,
2425      "context_window": 128000,
2426      "default_max_tokens": 4096,
2427      "can_reason": false,
2428      "supports_attachments": false,
2429      "options": {}
2430    },
2431    {
2432      "id": "qwen/qwen3-14b",
2433      "name": "Qwen: Qwen3 14B",
2434      "cost_per_1m_in": 0.05,
2435      "cost_per_1m_out": 0.22,
2436      "cost_per_1m_in_cached": 0,
2437      "cost_per_1m_out_cached": 0,
2438      "context_window": 40960,
2439      "default_max_tokens": 20480,
2440      "can_reason": true,
2441      "reasoning_levels": [
2442        "low",
2443        "medium",
2444        "high"
2445      ],
2446      "default_reasoning_effort": "medium",
2447      "supports_attachments": false,
2448      "options": {}
2449    },
2450    {
2451      "id": "qwen/qwen3-235b-a22b-2507",
2452      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2453      "cost_per_1m_in": 0.22,
2454      "cost_per_1m_out": 0.88,
2455      "cost_per_1m_in_cached": 0,
2456      "cost_per_1m_out_cached": 0,
2457      "context_window": 262144,
2458      "default_max_tokens": 8192,
2459      "can_reason": false,
2460      "supports_attachments": false,
2461      "options": {}
2462    },
2463    {
2464      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2465      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2466      "cost_per_1m_in": 0.11,
2467      "cost_per_1m_out": 0.6,
2468      "cost_per_1m_in_cached": 0,
2469      "cost_per_1m_out_cached": 0,
2470      "context_window": 262144,
2471      "default_max_tokens": 131072,
2472      "can_reason": true,
2473      "reasoning_levels": [
2474        "low",
2475        "medium",
2476        "high"
2477      ],
2478      "default_reasoning_effort": "medium",
2479      "supports_attachments": false,
2480      "options": {}
2481    },
2482    {
2483      "id": "qwen/qwen3-30b-a3b",
2484      "name": "Qwen: Qwen3 30B A3B",
2485      "cost_per_1m_in": 0.08,
2486      "cost_per_1m_out": 0.28,
2487      "cost_per_1m_in_cached": 0,
2488      "cost_per_1m_out_cached": 0,
2489      "context_window": 131072,
2490      "default_max_tokens": 65536,
2491      "can_reason": true,
2492      "reasoning_levels": [
2493        "low",
2494        "medium",
2495        "high"
2496      ],
2497      "default_reasoning_effort": "medium",
2498      "supports_attachments": false,
2499      "options": {}
2500    },
2501    {
2502      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2503      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2504      "cost_per_1m_in": 0.1,
2505      "cost_per_1m_out": 0.3,
2506      "cost_per_1m_in_cached": 0,
2507      "cost_per_1m_out_cached": 0,
2508      "context_window": 262144,
2509      "default_max_tokens": 26214,
2510      "can_reason": false,
2511      "supports_attachments": false,
2512      "options": {}
2513    },
2514    {
2515      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2516      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2517      "cost_per_1m_in": 0.09,
2518      "cost_per_1m_out": 0.3,
2519      "cost_per_1m_in_cached": 0,
2520      "cost_per_1m_out_cached": 0,
2521      "context_window": 262144,
2522      "default_max_tokens": 65536,
2523      "can_reason": true,
2524      "reasoning_levels": [
2525        "low",
2526        "medium",
2527        "high"
2528      ],
2529      "default_reasoning_effort": "medium",
2530      "supports_attachments": false,
2531      "options": {}
2532    },
2533    {
2534      "id": "qwen/qwen3-32b",
2535      "name": "Qwen: Qwen3 32B",
2536      "cost_per_1m_in": 0.15,
2537      "cost_per_1m_out": 0.5,
2538      "cost_per_1m_in_cached": 0,
2539      "cost_per_1m_out_cached": 0,
2540      "context_window": 131072,
2541      "default_max_tokens": 4000,
2542      "can_reason": true,
2543      "reasoning_levels": [
2544        "low",
2545        "medium",
2546        "high"
2547      ],
2548      "default_reasoning_effort": "medium",
2549      "supports_attachments": false,
2550      "options": {}
2551    },
2552    {
2553      "id": "qwen/qwen3-4b:free",
2554      "name": "Qwen: Qwen3 4B (free)",
2555      "cost_per_1m_in": 0,
2556      "cost_per_1m_out": 0,
2557      "cost_per_1m_in_cached": 0,
2558      "cost_per_1m_out_cached": 0,
2559      "context_window": 40960,
2560      "default_max_tokens": 4096,
2561      "can_reason": true,
2562      "reasoning_levels": [
2563        "low",
2564        "medium",
2565        "high"
2566      ],
2567      "default_reasoning_effort": "medium",
2568      "supports_attachments": false,
2569      "options": {}
2570    },
2571    {
2572      "id": "qwen/qwen3-8b",
2573      "name": "Qwen: Qwen3 8B",
2574      "cost_per_1m_in": 0.2,
2575      "cost_per_1m_out": 0.2,
2576      "cost_per_1m_in_cached": 0,
2577      "cost_per_1m_out_cached": 0,
2578      "context_window": 40960,
2579      "default_max_tokens": 4096,
2580      "can_reason": true,
2581      "reasoning_levels": [
2582        "low",
2583        "medium",
2584        "high"
2585      ],
2586      "default_reasoning_effort": "medium",
2587      "supports_attachments": false,
2588      "options": {}
2589    },
2590    {
2591      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2592      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2593      "cost_per_1m_in": 0.07,
2594      "cost_per_1m_out": 0.28,
2595      "cost_per_1m_in_cached": 0,
2596      "cost_per_1m_out_cached": 0,
2597      "context_window": 262144,
2598      "default_max_tokens": 131072,
2599      "can_reason": false,
2600      "supports_attachments": false,
2601      "options": {}
2602    },
2603    {
2604      "id": "qwen/qwen3-coder",
2605      "name": "Qwen: Qwen3 Coder 480B A35B",
2606      "cost_per_1m_in": 0.22,
2607      "cost_per_1m_out": 1.8,
2608      "cost_per_1m_in_cached": 0,
2609      "cost_per_1m_out_cached": 0,
2610      "context_window": 262144,
2611      "default_max_tokens": 32768,
2612      "can_reason": false,
2613      "supports_attachments": false,
2614      "options": {}
2615    },
2616    {
2617      "id": "qwen/qwen3-coder:exacto",
2618      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2619      "cost_per_1m_in": 0.22,
2620      "cost_per_1m_out": 1.8,
2621      "cost_per_1m_in_cached": 0,
2622      "cost_per_1m_out_cached": 0,
2623      "context_window": 262144,
2624      "default_max_tokens": 32768,
2625      "can_reason": false,
2626      "supports_attachments": false,
2627      "options": {}
2628    },
2629    {
2630      "id": "qwen/qwen3-coder:free",
2631      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2632      "cost_per_1m_in": 0,
2633      "cost_per_1m_out": 0,
2634      "cost_per_1m_in_cached": 0,
2635      "cost_per_1m_out_cached": 0,
2636      "context_window": 262000,
2637      "default_max_tokens": 131000,
2638      "can_reason": false,
2639      "supports_attachments": false,
2640      "options": {}
2641    },
2642    {
2643      "id": "qwen/qwen3-next-80b-a3b-instruct",
2644      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2645      "cost_per_1m_in": 0.15,
2646      "cost_per_1m_out": 1.5,
2647      "cost_per_1m_in_cached": 0,
2648      "cost_per_1m_out_cached": 0,
2649      "context_window": 262144,
2650      "default_max_tokens": 26214,
2651      "can_reason": false,
2652      "supports_attachments": false,
2653      "options": {}
2654    },
2655    {
2656      "id": "qwen/qwen3-next-80b-a3b-instruct:free",
2657      "name": "Qwen: Qwen3 Next 80B A3B Instruct (free)",
2658      "cost_per_1m_in": 0,
2659      "cost_per_1m_out": 0,
2660      "cost_per_1m_in_cached": 0,
2661      "cost_per_1m_out_cached": 0,
2662      "context_window": 262144,
2663      "default_max_tokens": 26214,
2664      "can_reason": false,
2665      "supports_attachments": false,
2666      "options": {}
2667    },
2668    {
2669      "id": "qwen/qwen3-next-80b-a3b-thinking",
2670      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2671      "cost_per_1m_in": 0.15,
2672      "cost_per_1m_out": 1.2,
2673      "cost_per_1m_in_cached": 0,
2674      "cost_per_1m_out_cached": 0,
2675      "context_window": 262144,
2676      "default_max_tokens": 131072,
2677      "can_reason": true,
2678      "reasoning_levels": [
2679        "low",
2680        "medium",
2681        "high"
2682      ],
2683      "default_reasoning_effort": "medium",
2684      "supports_attachments": false,
2685      "options": {}
2686    },
2687    {
2688      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2689      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2690      "cost_per_1m_in": 0.22,
2691      "cost_per_1m_out": 0.88,
2692      "cost_per_1m_in_cached": 0,
2693      "cost_per_1m_out_cached": 0,
2694      "context_window": 262144,
2695      "default_max_tokens": 26214,
2696      "can_reason": false,
2697      "supports_attachments": true,
2698      "options": {}
2699    },
2700    {
2701      "id": "qwen/qwen3-vl-235b-a22b-thinking",
2702      "name": "Qwen: Qwen3 VL 235B A22B Thinking",
2703      "cost_per_1m_in": 0.45,
2704      "cost_per_1m_out": 3.5,
2705      "cost_per_1m_in_cached": 0,
2706      "cost_per_1m_out_cached": 0,
2707      "context_window": 262144,
2708      "default_max_tokens": 131072,
2709      "can_reason": true,
2710      "reasoning_levels": [
2711        "low",
2712        "medium",
2713        "high"
2714      ],
2715      "default_reasoning_effort": "medium",
2716      "supports_attachments": true,
2717      "options": {}
2718    },
2719    {
2720      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2721      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2722      "cost_per_1m_in": 0.15,
2723      "cost_per_1m_out": 0.6,
2724      "cost_per_1m_in_cached": 0,
2725      "cost_per_1m_out_cached": 0,
2726      "context_window": 262144,
2727      "default_max_tokens": 26214,
2728      "can_reason": false,
2729      "supports_attachments": true,
2730      "options": {}
2731    },
2732    {
2733      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2734      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2735      "cost_per_1m_in": 0.29,
2736      "cost_per_1m_out": 1,
2737      "cost_per_1m_in_cached": 0,
2738      "cost_per_1m_out_cached": 0,
2739      "context_window": 262144,
2740      "default_max_tokens": 131072,
2741      "can_reason": true,
2742      "reasoning_levels": [
2743        "low",
2744        "medium",
2745        "high"
2746      ],
2747      "default_reasoning_effort": "medium",
2748      "supports_attachments": true,
2749      "options": {}
2750    },
2751    {
2752      "id": "relace/relace-search",
2753      "name": "Relace: Relace Search",
2754      "cost_per_1m_in": 1,
2755      "cost_per_1m_out": 3,
2756      "cost_per_1m_in_cached": 0,
2757      "cost_per_1m_out_cached": 0,
2758      "context_window": 256000,
2759      "default_max_tokens": 64000,
2760      "can_reason": false,
2761      "supports_attachments": false,
2762      "options": {}
2763    },
2764    {
2765      "id": "stepfun-ai/step3",
2766      "name": "StepFun: Step3",
2767      "cost_per_1m_in": 0.57,
2768      "cost_per_1m_out": 1.42,
2769      "cost_per_1m_in_cached": 0,
2770      "cost_per_1m_out_cached": 0,
2771      "context_window": 65536,
2772      "default_max_tokens": 32768,
2773      "can_reason": true,
2774      "reasoning_levels": [
2775        "low",
2776        "medium",
2777        "high"
2778      ],
2779      "default_reasoning_effort": "medium",
2780      "supports_attachments": true,
2781      "options": {}
2782    },
2783    {
2784      "id": "tngtech/deepseek-r1t2-chimera",
2785      "name": "TNG: DeepSeek R1T2 Chimera",
2786      "cost_per_1m_in": 0.25,
2787      "cost_per_1m_out": 0.85,
2788      "cost_per_1m_in_cached": 0,
2789      "cost_per_1m_out_cached": 0,
2790      "context_window": 163840,
2791      "default_max_tokens": 81920,
2792      "can_reason": true,
2793      "reasoning_levels": [
2794        "low",
2795        "medium",
2796        "high"
2797      ],
2798      "default_reasoning_effort": "medium",
2799      "supports_attachments": false,
2800      "options": {}
2801    },
2802    {
2803      "id": "tngtech/tng-r1t-chimera",
2804      "name": "TNG: R1T Chimera",
2805      "cost_per_1m_in": 0.25,
2806      "cost_per_1m_out": 0.85,
2807      "cost_per_1m_in_cached": 0,
2808      "cost_per_1m_out_cached": 0,
2809      "context_window": 163840,
2810      "default_max_tokens": 32768,
2811      "can_reason": true,
2812      "reasoning_levels": [
2813        "low",
2814        "medium",
2815        "high"
2816      ],
2817      "default_reasoning_effort": "medium",
2818      "supports_attachments": false,
2819      "options": {}
2820    },
2821    {
2822      "id": "tngtech/tng-r1t-chimera:free",
2823      "name": "TNG: R1T Chimera (free)",
2824      "cost_per_1m_in": 0,
2825      "cost_per_1m_out": 0,
2826      "cost_per_1m_in_cached": 0,
2827      "cost_per_1m_out_cached": 0,
2828      "context_window": 163840,
2829      "default_max_tokens": 32768,
2830      "can_reason": true,
2831      "reasoning_levels": [
2832        "low",
2833        "medium",
2834        "high"
2835      ],
2836      "default_reasoning_effort": "medium",
2837      "supports_attachments": false,
2838      "options": {}
2839    },
2840    {
2841      "id": "thedrummer/rocinante-12b",
2842      "name": "TheDrummer: Rocinante 12B",
2843      "cost_per_1m_in": 0.17,
2844      "cost_per_1m_out": 0.43,
2845      "cost_per_1m_in_cached": 0,
2846      "cost_per_1m_out_cached": 0,
2847      "context_window": 32768,
2848      "default_max_tokens": 3276,
2849      "can_reason": false,
2850      "supports_attachments": false,
2851      "options": {}
2852    },
2853    {
2854      "id": "thedrummer/unslopnemo-12b",
2855      "name": "TheDrummer: UnslopNemo 12B",
2856      "cost_per_1m_in": 0.4,
2857      "cost_per_1m_out": 0.4,
2858      "cost_per_1m_in_cached": 0,
2859      "cost_per_1m_out_cached": 0,
2860      "context_window": 32768,
2861      "default_max_tokens": 3276,
2862      "can_reason": false,
2863      "supports_attachments": false,
2864      "options": {}
2865    },
2866    {
2867      "id": "alibaba/tongyi-deepresearch-30b-a3b",
2868      "name": "Tongyi DeepResearch 30B A3B",
2869      "cost_per_1m_in": 0.09,
2870      "cost_per_1m_out": 0.4,
2871      "cost_per_1m_in_cached": 0,
2872      "cost_per_1m_out_cached": 0,
2873      "context_window": 131072,
2874      "default_max_tokens": 65536,
2875      "can_reason": true,
2876      "reasoning_levels": [
2877        "low",
2878        "medium",
2879        "high"
2880      ],
2881      "default_reasoning_effort": "medium",
2882      "supports_attachments": false,
2883      "options": {}
2884    },
2885    {
2886      "id": "xiaomi/mimo-v2-flash",
2887      "name": "Xiaomi: MiMo-V2-Flash",
2888      "cost_per_1m_in": 0.1,
2889      "cost_per_1m_out": 0.3,
2890      "cost_per_1m_in_cached": 0,
2891      "cost_per_1m_out_cached": 0.02,
2892      "context_window": 262144,
2893      "default_max_tokens": 16000,
2894      "can_reason": true,
2895      "reasoning_levels": [
2896        "low",
2897        "medium",
2898        "high"
2899      ],
2900      "default_reasoning_effort": "medium",
2901      "supports_attachments": false,
2902      "options": {}
2903    },
2904    {
2905      "id": "xiaomi/mimo-v2-flash:free",
2906      "name": "Xiaomi: MiMo-V2-Flash (free)",
2907      "cost_per_1m_in": 0,
2908      "cost_per_1m_out": 0,
2909      "cost_per_1m_in_cached": 0,
2910      "cost_per_1m_out_cached": 0,
2911      "context_window": 262144,
2912      "default_max_tokens": 32768,
2913      "can_reason": true,
2914      "reasoning_levels": [
2915        "low",
2916        "medium",
2917        "high"
2918      ],
2919      "default_reasoning_effort": "medium",
2920      "supports_attachments": false,
2921      "options": {}
2922    },
2923    {
2924      "id": "z-ai/glm-4-32b",
2925      "name": "Z.AI: GLM 4 32B",
2926      "cost_per_1m_in": 0.1,
2927      "cost_per_1m_out": 0.1,
2928      "cost_per_1m_in_cached": 0,
2929      "cost_per_1m_out_cached": 0,
2930      "context_window": 128000,
2931      "default_max_tokens": 12800,
2932      "can_reason": false,
2933      "supports_attachments": false,
2934      "options": {}
2935    },
2936    {
2937      "id": "z-ai/glm-4.5",
2938      "name": "Z.AI: GLM 4.5",
2939      "cost_per_1m_in": 0.6,
2940      "cost_per_1m_out": 2.2,
2941      "cost_per_1m_in_cached": 0,
2942      "cost_per_1m_out_cached": 0.11,
2943      "context_window": 131072,
2944      "default_max_tokens": 49152,
2945      "can_reason": true,
2946      "reasoning_levels": [
2947        "low",
2948        "medium",
2949        "high"
2950      ],
2951      "default_reasoning_effort": "medium",
2952      "supports_attachments": false,
2953      "options": {}
2954    },
2955    {
2956      "id": "z-ai/glm-4.5-air",
2957      "name": "Z.AI: GLM 4.5 Air",
2958      "cost_per_1m_in": 0.13,
2959      "cost_per_1m_out": 0.85,
2960      "cost_per_1m_in_cached": 0,
2961      "cost_per_1m_out_cached": 0,
2962      "context_window": 131072,
2963      "default_max_tokens": 49152,
2964      "can_reason": true,
2965      "reasoning_levels": [
2966        "low",
2967        "medium",
2968        "high"
2969      ],
2970      "default_reasoning_effort": "medium",
2971      "supports_attachments": false,
2972      "options": {}
2973    },
2974    {
2975      "id": "z-ai/glm-4.5-air:free",
2976      "name": "Z.AI: GLM 4.5 Air (free)",
2977      "cost_per_1m_in": 0,
2978      "cost_per_1m_out": 0,
2979      "cost_per_1m_in_cached": 0,
2980      "cost_per_1m_out_cached": 0,
2981      "context_window": 131072,
2982      "default_max_tokens": 48000,
2983      "can_reason": true,
2984      "reasoning_levels": [
2985        "low",
2986        "medium",
2987        "high"
2988      ],
2989      "default_reasoning_effort": "medium",
2990      "supports_attachments": false,
2991      "options": {}
2992    },
2993    {
2994      "id": "z-ai/glm-4.5v",
2995      "name": "Z.AI: GLM 4.5V",
2996      "cost_per_1m_in": 0.6,
2997      "cost_per_1m_out": 1.8,
2998      "cost_per_1m_in_cached": 0,
2999      "cost_per_1m_out_cached": 0.11,
3000      "context_window": 65536,
3001      "default_max_tokens": 8192,
3002      "can_reason": true,
3003      "reasoning_levels": [
3004        "low",
3005        "medium",
3006        "high"
3007      ],
3008      "default_reasoning_effort": "medium",
3009      "supports_attachments": true,
3010      "options": {}
3011    },
3012    {
3013      "id": "z-ai/glm-4.6",
3014      "name": "Z.AI: GLM 4.6",
3015      "cost_per_1m_in": 0.39,
3016      "cost_per_1m_out": 1.9,
3017      "cost_per_1m_in_cached": 0,
3018      "cost_per_1m_out_cached": 0,
3019      "context_window": 204800,
3020      "default_max_tokens": 102400,
3021      "can_reason": true,
3022      "reasoning_levels": [
3023        "low",
3024        "medium",
3025        "high"
3026      ],
3027      "default_reasoning_effort": "medium",
3028      "supports_attachments": false,
3029      "options": {}
3030    },
3031    {
3032      "id": "z-ai/glm-4.6:exacto",
3033      "name": "Z.AI: GLM 4.6 (exacto)",
3034      "cost_per_1m_in": 0.44,
3035      "cost_per_1m_out": 1.76,
3036      "cost_per_1m_in_cached": 0,
3037      "cost_per_1m_out_cached": 0,
3038      "context_window": 204800,
3039      "default_max_tokens": 65536,
3040      "can_reason": true,
3041      "reasoning_levels": [
3042        "low",
3043        "medium",
3044        "high"
3045      ],
3046      "default_reasoning_effort": "medium",
3047      "supports_attachments": false,
3048      "options": {}
3049    },
3050    {
3051      "id": "z-ai/glm-4.6v",
3052      "name": "Z.AI: GLM 4.6V",
3053      "cost_per_1m_in": 0.3,
3054      "cost_per_1m_out": 0.9,
3055      "cost_per_1m_in_cached": 0,
3056      "cost_per_1m_out_cached": 0,
3057      "context_window": 131072,
3058      "default_max_tokens": 65536,
3059      "can_reason": true,
3060      "reasoning_levels": [
3061        "low",
3062        "medium",
3063        "high"
3064      ],
3065      "default_reasoning_effort": "medium",
3066      "supports_attachments": true,
3067      "options": {}
3068    },
3069    {
3070      "id": "z-ai/glm-4.7",
3071      "name": "Z.AI: GLM 4.7",
3072      "cost_per_1m_in": 0.42,
3073      "cost_per_1m_out": 2.2,
3074      "cost_per_1m_in_cached": 0,
3075      "cost_per_1m_out_cached": 0,
3076      "context_window": 204800,
3077      "default_max_tokens": 102400,
3078      "can_reason": true,
3079      "reasoning_levels": [
3080        "low",
3081        "medium",
3082        "high"
3083      ],
3084      "default_reasoning_effort": "medium",
3085      "supports_attachments": false,
3086      "options": {}
3087    },
3088    {
3089      "id": "x-ai/grok-3",
3090      "name": "xAI: Grok 3",
3091      "cost_per_1m_in": 3,
3092      "cost_per_1m_out": 15,
3093      "cost_per_1m_in_cached": 0,
3094      "cost_per_1m_out_cached": 0.75,
3095      "context_window": 131072,
3096      "default_max_tokens": 13107,
3097      "can_reason": false,
3098      "supports_attachments": false,
3099      "options": {}
3100    },
3101    {
3102      "id": "x-ai/grok-3-beta",
3103      "name": "xAI: Grok 3 Beta",
3104      "cost_per_1m_in": 3,
3105      "cost_per_1m_out": 15,
3106      "cost_per_1m_in_cached": 0,
3107      "cost_per_1m_out_cached": 0.75,
3108      "context_window": 131072,
3109      "default_max_tokens": 13107,
3110      "can_reason": false,
3111      "supports_attachments": false,
3112      "options": {}
3113    },
3114    {
3115      "id": "x-ai/grok-3-mini",
3116      "name": "xAI: Grok 3 Mini",
3117      "cost_per_1m_in": 0.6,
3118      "cost_per_1m_out": 4,
3119      "cost_per_1m_in_cached": 0,
3120      "cost_per_1m_out_cached": 0.15,
3121      "context_window": 131072,
3122      "default_max_tokens": 13107,
3123      "can_reason": true,
3124      "reasoning_levels": [
3125        "low",
3126        "medium",
3127        "high"
3128      ],
3129      "default_reasoning_effort": "medium",
3130      "supports_attachments": false,
3131      "options": {}
3132    },
3133    {
3134      "id": "x-ai/grok-3-mini-beta",
3135      "name": "xAI: Grok 3 Mini Beta",
3136      "cost_per_1m_in": 0.6,
3137      "cost_per_1m_out": 4,
3138      "cost_per_1m_in_cached": 0,
3139      "cost_per_1m_out_cached": 0.15,
3140      "context_window": 131072,
3141      "default_max_tokens": 13107,
3142      "can_reason": true,
3143      "reasoning_levels": [
3144        "low",
3145        "medium",
3146        "high"
3147      ],
3148      "default_reasoning_effort": "medium",
3149      "supports_attachments": false,
3150      "options": {}
3151    },
3152    {
3153      "id": "x-ai/grok-4",
3154      "name": "xAI: Grok 4",
3155      "cost_per_1m_in": 3,
3156      "cost_per_1m_out": 15,
3157      "cost_per_1m_in_cached": 0,
3158      "cost_per_1m_out_cached": 0.75,
3159      "context_window": 256000,
3160      "default_max_tokens": 25600,
3161      "can_reason": true,
3162      "reasoning_levels": [
3163        "low",
3164        "medium",
3165        "high"
3166      ],
3167      "default_reasoning_effort": "medium",
3168      "supports_attachments": true,
3169      "options": {}
3170    },
3171    {
3172      "id": "x-ai/grok-4-fast",
3173      "name": "xAI: Grok 4 Fast",
3174      "cost_per_1m_in": 0.2,
3175      "cost_per_1m_out": 0.5,
3176      "cost_per_1m_in_cached": 0,
3177      "cost_per_1m_out_cached": 0.05,
3178      "context_window": 2000000,
3179      "default_max_tokens": 15000,
3180      "can_reason": true,
3181      "reasoning_levels": [
3182        "low",
3183        "medium",
3184        "high"
3185      ],
3186      "default_reasoning_effort": "medium",
3187      "supports_attachments": true,
3188      "options": {}
3189    },
3190    {
3191      "id": "x-ai/grok-4.1-fast",
3192      "name": "xAI: Grok 4.1 Fast",
3193      "cost_per_1m_in": 0.2,
3194      "cost_per_1m_out": 0.5,
3195      "cost_per_1m_in_cached": 0,
3196      "cost_per_1m_out_cached": 0.05,
3197      "context_window": 2000000,
3198      "default_max_tokens": 15000,
3199      "can_reason": true,
3200      "reasoning_levels": [
3201        "low",
3202        "medium",
3203        "high"
3204      ],
3205      "default_reasoning_effort": "medium",
3206      "supports_attachments": true,
3207      "options": {}
3208    },
3209    {
3210      "id": "x-ai/grok-code-fast-1",
3211      "name": "xAI: Grok Code Fast 1",
3212      "cost_per_1m_in": 0.2,
3213      "cost_per_1m_out": 1.5,
3214      "cost_per_1m_in_cached": 0,
3215      "cost_per_1m_out_cached": 0.02,
3216      "context_window": 256000,
3217      "default_max_tokens": 5000,
3218      "can_reason": true,
3219      "reasoning_levels": [
3220        "low",
3221        "medium",
3222        "high"
3223      ],
3224      "default_reasoning_effort": "medium",
3225      "supports_attachments": false,
3226      "options": {}
3227    }
3228  ],
3229  "default_headers": {
3230    "HTTP-Referer": "https://charm.land",
3231    "X-Title": "Crush"
3232  }
3233}