openrouter.json

   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false,
  21      "options": {}
  22    },
  23    {
  24      "id": "ai21/jamba-mini-1.7",
  25      "name": "AI21: Jamba Mini 1.7",
  26      "cost_per_1m_in": 0.2,
  27      "cost_per_1m_out": 0.4,
  28      "cost_per_1m_in_cached": 0,
  29      "cost_per_1m_out_cached": 0,
  30      "context_window": 256000,
  31      "default_max_tokens": 2048,
  32      "can_reason": false,
  33      "supports_attachments": false,
  34      "options": {}
  35    },
  36    {
  37      "id": "allenai/olmo-3-7b-instruct",
  38      "name": "AllenAI: Olmo 3 7B Instruct",
  39      "cost_per_1m_in": 0.1,
  40      "cost_per_1m_out": 0.2,
  41      "cost_per_1m_in_cached": 0,
  42      "cost_per_1m_out_cached": 0,
  43      "context_window": 65536,
  44      "default_max_tokens": 32768,
  45      "can_reason": false,
  46      "supports_attachments": false,
  47      "options": {}
  48    },
  49    {
  50      "id": "amazon/nova-2-lite-v1",
  51      "name": "Amazon: Nova 2 Lite",
  52      "cost_per_1m_in": 0.3,
  53      "cost_per_1m_out": 2.5,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 1000000,
  57      "default_max_tokens": 32767,
  58      "can_reason": true,
  59      "reasoning_levels": [
  60        "low",
  61        "medium",
  62        "high"
  63      ],
  64      "default_reasoning_effort": "medium",
  65      "supports_attachments": true,
  66      "options": {}
  67    },
  68    {
  69      "id": "amazon/nova-lite-v1",
  70      "name": "Amazon: Nova Lite 1.0",
  71      "cost_per_1m_in": 0.06,
  72      "cost_per_1m_out": 0.24,
  73      "cost_per_1m_in_cached": 0,
  74      "cost_per_1m_out_cached": 0,
  75      "context_window": 300000,
  76      "default_max_tokens": 2560,
  77      "can_reason": false,
  78      "supports_attachments": true,
  79      "options": {}
  80    },
  81    {
  82      "id": "amazon/nova-micro-v1",
  83      "name": "Amazon: Nova Micro 1.0",
  84      "cost_per_1m_in": 0.035,
  85      "cost_per_1m_out": 0.14,
  86      "cost_per_1m_in_cached": 0,
  87      "cost_per_1m_out_cached": 0,
  88      "context_window": 128000,
  89      "default_max_tokens": 2560,
  90      "can_reason": false,
  91      "supports_attachments": false,
  92      "options": {}
  93    },
  94    {
  95      "id": "amazon/nova-premier-v1",
  96      "name": "Amazon: Nova Premier 1.0",
  97      "cost_per_1m_in": 2.5,
  98      "cost_per_1m_out": 12.5,
  99      "cost_per_1m_in_cached": 0,
 100      "cost_per_1m_out_cached": 0.625,
 101      "context_window": 1000000,
 102      "default_max_tokens": 16000,
 103      "can_reason": false,
 104      "supports_attachments": true,
 105      "options": {}
 106    },
 107    {
 108      "id": "amazon/nova-pro-v1",
 109      "name": "Amazon: Nova Pro 1.0",
 110      "cost_per_1m_in": 0.8,
 111      "cost_per_1m_out": 3.2,
 112      "cost_per_1m_in_cached": 0,
 113      "cost_per_1m_out_cached": 0,
 114      "context_window": 300000,
 115      "default_max_tokens": 2560,
 116      "can_reason": false,
 117      "supports_attachments": true,
 118      "options": {}
 119    },
 120    {
 121      "id": "anthropic/claude-3-haiku",
 122      "name": "Anthropic: Claude 3 Haiku",
 123      "cost_per_1m_in": 0.25,
 124      "cost_per_1m_out": 1.25,
 125      "cost_per_1m_in_cached": 0.3,
 126      "cost_per_1m_out_cached": 0.03,
 127      "context_window": 200000,
 128      "default_max_tokens": 2048,
 129      "can_reason": false,
 130      "supports_attachments": true,
 131      "options": {}
 132    },
 133    {
 134      "id": "anthropic/claude-3-opus",
 135      "name": "Anthropic: Claude 3 Opus",
 136      "cost_per_1m_in": 15,
 137      "cost_per_1m_out": 75,
 138      "cost_per_1m_in_cached": 18.75,
 139      "cost_per_1m_out_cached": 1.5,
 140      "context_window": 200000,
 141      "default_max_tokens": 2048,
 142      "can_reason": false,
 143      "supports_attachments": true,
 144      "options": {}
 145    },
 146    {
 147      "id": "anthropic/claude-3.5-haiku",
 148      "name": "Anthropic: Claude 3.5 Haiku",
 149      "cost_per_1m_in": 0.8,
 150      "cost_per_1m_out": 4,
 151      "cost_per_1m_in_cached": 1,
 152      "cost_per_1m_out_cached": 0.08,
 153      "context_window": 200000,
 154      "default_max_tokens": 4096,
 155      "can_reason": false,
 156      "supports_attachments": true,
 157      "options": {}
 158    },
 159    {
 160      "id": "anthropic/claude-3.5-haiku-20241022",
 161      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
 162      "cost_per_1m_in": 0.8,
 163      "cost_per_1m_out": 4,
 164      "cost_per_1m_in_cached": 1,
 165      "cost_per_1m_out_cached": 0.08,
 166      "context_window": 200000,
 167      "default_max_tokens": 4096,
 168      "can_reason": false,
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-3.5-sonnet",
 174      "name": "Anthropic: Claude 3.5 Sonnet",
 175      "cost_per_1m_in": 6,
 176      "cost_per_1m_out": 30,
 177      "cost_per_1m_in_cached": 0,
 178      "cost_per_1m_out_cached": 0,
 179      "context_window": 200000,
 180      "default_max_tokens": 4096,
 181      "can_reason": false,
 182      "supports_attachments": true,
 183      "options": {}
 184    },
 185    {
 186      "id": "anthropic/claude-3.7-sonnet",
 187      "name": "Anthropic: Claude 3.7 Sonnet",
 188      "cost_per_1m_in": 3,
 189      "cost_per_1m_out": 15,
 190      "cost_per_1m_in_cached": 3.75,
 191      "cost_per_1m_out_cached": 0.3,
 192      "context_window": 200000,
 193      "default_max_tokens": 32000,
 194      "can_reason": true,
 195      "reasoning_levels": [
 196        "low",
 197        "medium",
 198        "high"
 199      ],
 200      "default_reasoning_effort": "medium",
 201      "supports_attachments": true,
 202      "options": {}
 203    },
 204    {
 205      "id": "anthropic/claude-3.7-sonnet:thinking",
 206      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 207      "cost_per_1m_in": 3,
 208      "cost_per_1m_out": 15,
 209      "cost_per_1m_in_cached": 3.75,
 210      "cost_per_1m_out_cached": 0.3,
 211      "context_window": 200000,
 212      "default_max_tokens": 32000,
 213      "can_reason": true,
 214      "reasoning_levels": [
 215        "low",
 216        "medium",
 217        "high"
 218      ],
 219      "default_reasoning_effort": "medium",
 220      "supports_attachments": true,
 221      "options": {}
 222    },
 223    {
 224      "id": "anthropic/claude-haiku-4.5",
 225      "name": "Anthropic: Claude Haiku 4.5",
 226      "cost_per_1m_in": 1,
 227      "cost_per_1m_out": 5,
 228      "cost_per_1m_in_cached": 1.25,
 229      "cost_per_1m_out_cached": 0.1,
 230      "context_window": 200000,
 231      "default_max_tokens": 32000,
 232      "can_reason": true,
 233      "reasoning_levels": [
 234        "low",
 235        "medium",
 236        "high"
 237      ],
 238      "default_reasoning_effort": "medium",
 239      "supports_attachments": true,
 240      "options": {}
 241    },
 242    {
 243      "id": "anthropic/claude-opus-4",
 244      "name": "Anthropic: Claude Opus 4",
 245      "cost_per_1m_in": 15,
 246      "cost_per_1m_out": 75,
 247      "cost_per_1m_in_cached": 18.75,
 248      "cost_per_1m_out_cached": 1.5,
 249      "context_window": 200000,
 250      "default_max_tokens": 16000,
 251      "can_reason": true,
 252      "reasoning_levels": [
 253        "low",
 254        "medium",
 255        "high"
 256      ],
 257      "default_reasoning_effort": "medium",
 258      "supports_attachments": true,
 259      "options": {}
 260    },
 261    {
 262      "id": "anthropic/claude-opus-4.1",
 263      "name": "Anthropic: Claude Opus 4.1",
 264      "cost_per_1m_in": 15,
 265      "cost_per_1m_out": 75,
 266      "cost_per_1m_in_cached": 18.75,
 267      "cost_per_1m_out_cached": 1.5,
 268      "context_window": 200000,
 269      "default_max_tokens": 20000,
 270      "can_reason": true,
 271      "reasoning_levels": [
 272        "low",
 273        "medium",
 274        "high"
 275      ],
 276      "default_reasoning_effort": "medium",
 277      "supports_attachments": true,
 278      "options": {}
 279    },
 280    {
 281      "id": "anthropic/claude-opus-4.5",
 282      "name": "Anthropic: Claude Opus 4.5",
 283      "cost_per_1m_in": 5,
 284      "cost_per_1m_out": 25,
 285      "cost_per_1m_in_cached": 6.25,
 286      "cost_per_1m_out_cached": 0.5,
 287      "context_window": 200000,
 288      "default_max_tokens": 32000,
 289      "can_reason": true,
 290      "reasoning_levels": [
 291        "low",
 292        "medium",
 293        "high"
 294      ],
 295      "default_reasoning_effort": "medium",
 296      "supports_attachments": true,
 297      "options": {}
 298    },
 299    {
 300      "id": "anthropic/claude-sonnet-4",
 301      "name": "Anthropic: Claude Sonnet 4",
 302      "cost_per_1m_in": 3,
 303      "cost_per_1m_out": 15,
 304      "cost_per_1m_in_cached": 3.75,
 305      "cost_per_1m_out_cached": 0.3,
 306      "context_window": 1000000,
 307      "default_max_tokens": 32000,
 308      "can_reason": true,
 309      "reasoning_levels": [
 310        "low",
 311        "medium",
 312        "high"
 313      ],
 314      "default_reasoning_effort": "medium",
 315      "supports_attachments": true,
 316      "options": {}
 317    },
 318    {
 319      "id": "anthropic/claude-sonnet-4.5",
 320      "name": "Anthropic: Claude Sonnet 4.5",
 321      "cost_per_1m_in": 3,
 322      "cost_per_1m_out": 15,
 323      "cost_per_1m_in_cached": 3.75,
 324      "cost_per_1m_out_cached": 0.3,
 325      "context_window": 1000000,
 326      "default_max_tokens": 32000,
 327      "can_reason": true,
 328      "reasoning_levels": [
 329        "low",
 330        "medium",
 331        "high"
 332      ],
 333      "default_reasoning_effort": "medium",
 334      "supports_attachments": true,
 335      "options": {}
 336    },
 337    {
 338      "id": "arcee-ai/trinity-mini",
 339      "name": "Arcee AI: Trinity Mini",
 340      "cost_per_1m_in": 0.045,
 341      "cost_per_1m_out": 0.15,
 342      "cost_per_1m_in_cached": 0,
 343      "cost_per_1m_out_cached": 0,
 344      "context_window": 131072,
 345      "default_max_tokens": 65536,
 346      "can_reason": true,
 347      "reasoning_levels": [
 348        "low",
 349        "medium",
 350        "high"
 351      ],
 352      "default_reasoning_effort": "medium",
 353      "supports_attachments": false,
 354      "options": {}
 355    },
 356    {
 357      "id": "arcee-ai/trinity-mini:free",
 358      "name": "Arcee AI: Trinity Mini (free)",
 359      "cost_per_1m_in": 0,
 360      "cost_per_1m_out": 0,
 361      "cost_per_1m_in_cached": 0,
 362      "cost_per_1m_out_cached": 0,
 363      "context_window": 131072,
 364      "default_max_tokens": 13107,
 365      "can_reason": true,
 366      "reasoning_levels": [
 367        "low",
 368        "medium",
 369        "high"
 370      ],
 371      "default_reasoning_effort": "medium",
 372      "supports_attachments": false,
 373      "options": {}
 374    },
 375    {
 376      "id": "arcee-ai/virtuoso-large",
 377      "name": "Arcee AI: Virtuoso Large",
 378      "cost_per_1m_in": 0.75,
 379      "cost_per_1m_out": 1.2,
 380      "cost_per_1m_in_cached": 0,
 381      "cost_per_1m_out_cached": 0,
 382      "context_window": 131072,
 383      "default_max_tokens": 32000,
 384      "can_reason": false,
 385      "supports_attachments": false,
 386      "options": {}
 387    },
 388    {
 389      "id": "baidu/ernie-4.5-21b-a3b",
 390      "name": "Baidu: ERNIE 4.5 21B A3B",
 391      "cost_per_1m_in": 0.07,
 392      "cost_per_1m_out": 0.28,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 120000,
 396      "default_max_tokens": 4000,
 397      "can_reason": false,
 398      "supports_attachments": false,
 399      "options": {}
 400    },
 401    {
 402      "id": "baidu/ernie-4.5-vl-28b-a3b",
 403      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 404      "cost_per_1m_in": 0.14,
 405      "cost_per_1m_out": 0.56,
 406      "cost_per_1m_in_cached": 0,
 407      "cost_per_1m_out_cached": 0,
 408      "context_window": 30000,
 409      "default_max_tokens": 4000,
 410      "can_reason": true,
 411      "reasoning_levels": [
 412        "low",
 413        "medium",
 414        "high"
 415      ],
 416      "default_reasoning_effort": "medium",
 417      "supports_attachments": true,
 418      "options": {}
 419    },
 420    {
 421      "id": "bytedance-seed/seed-1.6",
 422      "name": "ByteDance Seed: Seed 1.6",
 423      "cost_per_1m_in": 0.25,
 424      "cost_per_1m_out": 2,
 425      "cost_per_1m_in_cached": 0,
 426      "cost_per_1m_out_cached": 0,
 427      "context_window": 262144,
 428      "default_max_tokens": 16384,
 429      "can_reason": true,
 430      "reasoning_levels": [
 431        "low",
 432        "medium",
 433        "high"
 434      ],
 435      "default_reasoning_effort": "medium",
 436      "supports_attachments": true,
 437      "options": {}
 438    },
 439    {
 440      "id": "bytedance-seed/seed-1.6-flash",
 441      "name": "ByteDance Seed: Seed 1.6 Flash",
 442      "cost_per_1m_in": 0.075,
 443      "cost_per_1m_out": 0.3,
 444      "cost_per_1m_in_cached": 0,
 445      "cost_per_1m_out_cached": 0,
 446      "context_window": 262144,
 447      "default_max_tokens": 8192,
 448      "can_reason": true,
 449      "reasoning_levels": [
 450        "low",
 451        "medium",
 452        "high"
 453      ],
 454      "default_reasoning_effort": "medium",
 455      "supports_attachments": true,
 456      "options": {}
 457    },
 458    {
 459      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
 460      "name": "Cogito V2 Preview Llama 109B",
 461      "cost_per_1m_in": 0.18,
 462      "cost_per_1m_out": 0.59,
 463      "cost_per_1m_in_cached": 0,
 464      "cost_per_1m_out_cached": 0,
 465      "context_window": 32767,
 466      "default_max_tokens": 3276,
 467      "can_reason": true,
 468      "reasoning_levels": [
 469        "low",
 470        "medium",
 471        "high"
 472      ],
 473      "default_reasoning_effort": "medium",
 474      "supports_attachments": true,
 475      "options": {}
 476    },
 477    {
 478      "id": "cohere/command-r-08-2024",
 479      "name": "Cohere: Command R (08-2024)",
 480      "cost_per_1m_in": 0.15,
 481      "cost_per_1m_out": 0.6,
 482      "cost_per_1m_in_cached": 0,
 483      "cost_per_1m_out_cached": 0,
 484      "context_window": 128000,
 485      "default_max_tokens": 2000,
 486      "can_reason": false,
 487      "supports_attachments": false,
 488      "options": {}
 489    },
 490    {
 491      "id": "cohere/command-r-plus-08-2024",
 492      "name": "Cohere: Command R+ (08-2024)",
 493      "cost_per_1m_in": 2.5,
 494      "cost_per_1m_out": 10,
 495      "cost_per_1m_in_cached": 0,
 496      "cost_per_1m_out_cached": 0,
 497      "context_window": 128000,
 498      "default_max_tokens": 2000,
 499      "can_reason": false,
 500      "supports_attachments": false,
 501      "options": {}
 502    },
 503    {
 504      "id": "deepcogito/cogito-v2-preview-llama-405b",
 505      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
 506      "cost_per_1m_in": 3.5,
 507      "cost_per_1m_out": 3.5,
 508      "cost_per_1m_in_cached": 0,
 509      "cost_per_1m_out_cached": 0,
 510      "context_window": 32768,
 511      "default_max_tokens": 3276,
 512      "can_reason": true,
 513      "reasoning_levels": [
 514        "low",
 515        "medium",
 516        "high"
 517      ],
 518      "default_reasoning_effort": "medium",
 519      "supports_attachments": false,
 520      "options": {}
 521    },
 522    {
 523      "id": "deepcogito/cogito-v2-preview-llama-70b",
 524      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
 525      "cost_per_1m_in": 0.88,
 526      "cost_per_1m_out": 0.88,
 527      "cost_per_1m_in_cached": 0,
 528      "cost_per_1m_out_cached": 0,
 529      "context_window": 32768,
 530      "default_max_tokens": 3276,
 531      "can_reason": true,
 532      "reasoning_levels": [
 533        "low",
 534        "medium",
 535        "high"
 536      ],
 537      "default_reasoning_effort": "medium",
 538      "supports_attachments": false,
 539      "options": {}
 540    },
 541    {
 542      "id": "deepseek/deepseek-chat",
 543      "name": "DeepSeek: DeepSeek V3",
 544      "cost_per_1m_in": 0.4,
 545      "cost_per_1m_out": 1.3,
 546      "cost_per_1m_in_cached": 0,
 547      "cost_per_1m_out_cached": 0,
 548      "context_window": 64000,
 549      "default_max_tokens": 8000,
 550      "can_reason": false,
 551      "supports_attachments": false,
 552      "options": {}
 553    },
 554    {
 555      "id": "deepseek/deepseek-chat-v3-0324",
 556      "name": "DeepSeek: DeepSeek V3 0324",
 557      "cost_per_1m_in": 0.77,
 558      "cost_per_1m_out": 0.77,
 559      "cost_per_1m_in_cached": 0,
 560      "cost_per_1m_out_cached": 0,
 561      "context_window": 163840,
 562      "default_max_tokens": 65536,
 563      "can_reason": true,
 564      "reasoning_levels": [
 565        "low",
 566        "medium",
 567        "high"
 568      ],
 569      "default_reasoning_effort": "medium",
 570      "supports_attachments": false,
 571      "options": {}
 572    },
 573    {
 574      "id": "deepseek/deepseek-chat-v3.1",
 575      "name": "DeepSeek: DeepSeek V3.1",
 576      "cost_per_1m_in": 0.21,
 577      "cost_per_1m_out": 0.79,
 578      "cost_per_1m_in_cached": 0,
 579      "cost_per_1m_out_cached": 0.168,
 580      "context_window": 163840,
 581      "default_max_tokens": 16384,
 582      "can_reason": true,
 583      "reasoning_levels": [
 584        "low",
 585        "medium",
 586        "high"
 587      ],
 588      "default_reasoning_effort": "medium",
 589      "supports_attachments": false,
 590      "options": {}
 591    },
 592    {
 593      "id": "deepseek/deepseek-v3.1-terminus",
 594      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 595      "cost_per_1m_in": 0.21,
 596      "cost_per_1m_out": 0.79,
 597      "cost_per_1m_in_cached": 0,
 598      "cost_per_1m_out_cached": 0.168,
 599      "context_window": 163840,
 600      "default_max_tokens": 16384,
 601      "can_reason": true,
 602      "reasoning_levels": [
 603        "low",
 604        "medium",
 605        "high"
 606      ],
 607      "default_reasoning_effort": "medium",
 608      "supports_attachments": false,
 609      "options": {}
 610    },
 611    {
 612      "id": "deepseek/deepseek-v3.1-terminus:exacto",
 613      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
 614      "cost_per_1m_in": 0.21,
 615      "cost_per_1m_out": 0.79,
 616      "cost_per_1m_in_cached": 0,
 617      "cost_per_1m_out_cached": 0.168,
 618      "context_window": 163840,
 619      "default_max_tokens": 16384,
 620      "can_reason": true,
 621      "reasoning_levels": [
 622        "low",
 623        "medium",
 624        "high"
 625      ],
 626      "default_reasoning_effort": "medium",
 627      "supports_attachments": false,
 628      "options": {}
 629    },
 630    {
 631      "id": "deepseek/deepseek-v3.2",
 632      "name": "DeepSeek: DeepSeek V3.2",
 633      "cost_per_1m_in": 0.26,
 634      "cost_per_1m_out": 0.39,
 635      "cost_per_1m_in_cached": 0,
 636      "cost_per_1m_out_cached": 0.13,
 637      "context_window": 163840,
 638      "default_max_tokens": 16384,
 639      "can_reason": true,
 640      "reasoning_levels": [
 641        "low",
 642        "medium",
 643        "high"
 644      ],
 645      "default_reasoning_effort": "medium",
 646      "supports_attachments": false,
 647      "options": {}
 648    },
 649    {
 650      "id": "deepseek/deepseek-v3.2-exp",
 651      "name": "DeepSeek: DeepSeek V3.2 Exp",
 652      "cost_per_1m_in": 0.21,
 653      "cost_per_1m_out": 0.32,
 654      "cost_per_1m_in_cached": 0,
 655      "cost_per_1m_out_cached": 0.168,
 656      "context_window": 163840,
 657      "default_max_tokens": 16384,
 658      "can_reason": true,
 659      "reasoning_levels": [
 660        "low",
 661        "medium",
 662        "high"
 663      ],
 664      "default_reasoning_effort": "medium",
 665      "supports_attachments": false,
 666      "options": {}
 667    },
 668    {
 669      "id": "deepseek/deepseek-r1",
 670      "name": "DeepSeek: R1",
 671      "cost_per_1m_in": 0.7,
 672      "cost_per_1m_out": 2.4,
 673      "cost_per_1m_in_cached": 0,
 674      "cost_per_1m_out_cached": 0,
 675      "context_window": 163840,
 676      "default_max_tokens": 81920,
 677      "can_reason": true,
 678      "reasoning_levels": [
 679        "low",
 680        "medium",
 681        "high"
 682      ],
 683      "default_reasoning_effort": "medium",
 684      "supports_attachments": false,
 685      "options": {}
 686    },
 687    {
 688      "id": "deepseek/deepseek-r1-0528",
 689      "name": "DeepSeek: R1 0528",
 690      "cost_per_1m_in": 0.7,
 691      "cost_per_1m_out": 2.5,
 692      "cost_per_1m_in_cached": 0,
 693      "cost_per_1m_out_cached": 0.35,
 694      "context_window": 163840,
 695      "default_max_tokens": 16384,
 696      "can_reason": true,
 697      "reasoning_levels": [
 698        "low",
 699        "medium",
 700        "high"
 701      ],
 702      "default_reasoning_effort": "medium",
 703      "supports_attachments": false,
 704      "options": {}
 705    },
 706    {
 707      "id": "deepseek/deepseek-r1-distill-llama-70b",
 708      "name": "DeepSeek: R1 Distill Llama 70B",
 709      "cost_per_1m_in": 0.03,
 710      "cost_per_1m_out": 0.11,
 711      "cost_per_1m_in_cached": 0,
 712      "cost_per_1m_out_cached": 0,
 713      "context_window": 131072,
 714      "default_max_tokens": 65536,
 715      "can_reason": true,
 716      "reasoning_levels": [
 717        "low",
 718        "medium",
 719        "high"
 720      ],
 721      "default_reasoning_effort": "medium",
 722      "supports_attachments": false,
 723      "options": {}
 724    },
 725    {
 726      "id": "google/gemini-2.0-flash-001",
 727      "name": "Google: Gemini 2.0 Flash",
 728      "cost_per_1m_in": 0.1,
 729      "cost_per_1m_out": 0.4,
 730      "cost_per_1m_in_cached": 0.1833,
 731      "cost_per_1m_out_cached": 0.025,
 732      "context_window": 1048576,
 733      "default_max_tokens": 4096,
 734      "can_reason": false,
 735      "supports_attachments": true,
 736      "options": {}
 737    },
 738    {
 739      "id": "google/gemini-2.0-flash-exp:free",
 740      "name": "Google: Gemini 2.0 Flash Experimental (free)",
 741      "cost_per_1m_in": 0,
 742      "cost_per_1m_out": 0,
 743      "cost_per_1m_in_cached": 0,
 744      "cost_per_1m_out_cached": 0,
 745      "context_window": 1048576,
 746      "default_max_tokens": 4096,
 747      "can_reason": false,
 748      "supports_attachments": true,
 749      "options": {}
 750    },
 751    {
 752      "id": "google/gemini-2.0-flash-lite-001",
 753      "name": "Google: Gemini 2.0 Flash Lite",
 754      "cost_per_1m_in": 0.075,
 755      "cost_per_1m_out": 0.3,
 756      "cost_per_1m_in_cached": 0,
 757      "cost_per_1m_out_cached": 0,
 758      "context_window": 1048576,
 759      "default_max_tokens": 4096,
 760      "can_reason": false,
 761      "supports_attachments": true,
 762      "options": {}
 763    },
 764    {
 765      "id": "google/gemini-2.5-flash",
 766      "name": "Google: Gemini 2.5 Flash",
 767      "cost_per_1m_in": 0.3,
 768      "cost_per_1m_out": 2.5,
 769      "cost_per_1m_in_cached": 0.3833,
 770      "cost_per_1m_out_cached": 0.03,
 771      "context_window": 1048576,
 772      "default_max_tokens": 32767,
 773      "can_reason": true,
 774      "reasoning_levels": [
 775        "low",
 776        "medium",
 777        "high"
 778      ],
 779      "default_reasoning_effort": "medium",
 780      "supports_attachments": true,
 781      "options": {}
 782    },
 783    {
 784      "id": "google/gemini-2.5-flash-lite",
 785      "name": "Google: Gemini 2.5 Flash Lite",
 786      "cost_per_1m_in": 0.1,
 787      "cost_per_1m_out": 0.4,
 788      "cost_per_1m_in_cached": 0.1833,
 789      "cost_per_1m_out_cached": 0.01,
 790      "context_window": 1048576,
 791      "default_max_tokens": 32767,
 792      "can_reason": true,
 793      "reasoning_levels": [
 794        "low",
 795        "medium",
 796        "high"
 797      ],
 798      "default_reasoning_effort": "medium",
 799      "supports_attachments": true,
 800      "options": {}
 801    },
 802    {
 803      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 804      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 805      "cost_per_1m_in": 0.1,
 806      "cost_per_1m_out": 0.4,
 807      "cost_per_1m_in_cached": 0,
 808      "cost_per_1m_out_cached": 0,
 809      "context_window": 1048576,
 810      "default_max_tokens": 32767,
 811      "can_reason": true,
 812      "reasoning_levels": [
 813        "low",
 814        "medium",
 815        "high"
 816      ],
 817      "default_reasoning_effort": "medium",
 818      "supports_attachments": true,
 819      "options": {}
 820    },
 821    {
 822      "id": "google/gemini-2.5-flash-preview-09-2025",
 823      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
 824      "cost_per_1m_in": 0.3,
 825      "cost_per_1m_out": 2.5,
 826      "cost_per_1m_in_cached": 0.3833,
 827      "cost_per_1m_out_cached": 0.075,
 828      "context_window": 1048576,
 829      "default_max_tokens": 32767,
 830      "can_reason": true,
 831      "reasoning_levels": [
 832        "low",
 833        "medium",
 834        "high"
 835      ],
 836      "default_reasoning_effort": "medium",
 837      "supports_attachments": true,
 838      "options": {}
 839    },
 840    {
 841      "id": "google/gemini-2.5-pro",
 842      "name": "Google: Gemini 2.5 Pro",
 843      "cost_per_1m_in": 1.25,
 844      "cost_per_1m_out": 10,
 845      "cost_per_1m_in_cached": 1.625,
 846      "cost_per_1m_out_cached": 0.125,
 847      "context_window": 1048576,
 848      "default_max_tokens": 32768,
 849      "can_reason": true,
 850      "reasoning_levels": [
 851        "low",
 852        "medium",
 853        "high"
 854      ],
 855      "default_reasoning_effort": "medium",
 856      "supports_attachments": true,
 857      "options": {}
 858    },
 859    {
 860      "id": "google/gemini-2.5-pro-preview-05-06",
 861      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 862      "cost_per_1m_in": 1.25,
 863      "cost_per_1m_out": 10,
 864      "cost_per_1m_in_cached": 1.625,
 865      "cost_per_1m_out_cached": 0.125,
 866      "context_window": 1048576,
 867      "default_max_tokens": 32768,
 868      "can_reason": true,
 869      "reasoning_levels": [
 870        "low",
 871        "medium",
 872        "high"
 873      ],
 874      "default_reasoning_effort": "medium",
 875      "supports_attachments": true,
 876      "options": {}
 877    },
 878    {
 879      "id": "google/gemini-2.5-pro-preview",
 880      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 881      "cost_per_1m_in": 1.25,
 882      "cost_per_1m_out": 10,
 883      "cost_per_1m_in_cached": 1.625,
 884      "cost_per_1m_out_cached": 0.125,
 885      "context_window": 1048576,
 886      "default_max_tokens": 32768,
 887      "can_reason": true,
 888      "reasoning_levels": [
 889        "low",
 890        "medium",
 891        "high"
 892      ],
 893      "default_reasoning_effort": "medium",
 894      "supports_attachments": true,
 895      "options": {}
 896    },
 897    {
 898      "id": "google/gemini-3-flash-preview",
 899      "name": "Google: Gemini 3 Flash Preview",
 900      "cost_per_1m_in": 0.5,
 901      "cost_per_1m_out": 3,
 902      "cost_per_1m_in_cached": 0,
 903      "cost_per_1m_out_cached": 0.05,
 904      "context_window": 1048576,
 905      "default_max_tokens": 32767,
 906      "can_reason": true,
 907      "reasoning_levels": [
 908        "low",
 909        "medium",
 910        "high"
 911      ],
 912      "default_reasoning_effort": "medium",
 913      "supports_attachments": true,
 914      "options": {}
 915    },
 916    {
 917      "id": "google/gemini-3-pro-preview",
 918      "name": "Google: Gemini 3 Pro Preview",
 919      "cost_per_1m_in": 2,
 920      "cost_per_1m_out": 12,
 921      "cost_per_1m_in_cached": 2.375,
 922      "cost_per_1m_out_cached": 0.2,
 923      "context_window": 1048576,
 924      "default_max_tokens": 32768,
 925      "can_reason": true,
 926      "reasoning_levels": [
 927        "low",
 928        "medium",
 929        "high"
 930      ],
 931      "default_reasoning_effort": "medium",
 932      "supports_attachments": true,
 933      "options": {}
 934    },
 935    {
 936      "id": "google/gemma-3-27b-it:free",
 937      "name": "Google: Gemma 3 27B (free)",
 938      "cost_per_1m_in": 0,
 939      "cost_per_1m_out": 0,
 940      "cost_per_1m_in_cached": 0,
 941      "cost_per_1m_out_cached": 0,
 942      "context_window": 128000,
 943      "default_max_tokens": 12800,
 944      "can_reason": false,
 945      "supports_attachments": true,
 946      "options": {}
 947    },
 948    {
 949      "id": "inception/mercury",
 950      "name": "Inception: Mercury",
 951      "cost_per_1m_in": 0.25,
 952      "cost_per_1m_out": 1,
 953      "cost_per_1m_in_cached": 0,
 954      "cost_per_1m_out_cached": 0,
 955      "context_window": 128000,
 956      "default_max_tokens": 8192,
 957      "can_reason": false,
 958      "supports_attachments": false,
 959      "options": {}
 960    },
 961    {
 962      "id": "inception/mercury-coder",
 963      "name": "Inception: Mercury Coder",
 964      "cost_per_1m_in": 0.25,
 965      "cost_per_1m_out": 1,
 966      "cost_per_1m_in_cached": 0,
 967      "cost_per_1m_out_cached": 0,
 968      "context_window": 128000,
 969      "default_max_tokens": 8192,
 970      "can_reason": false,
 971      "supports_attachments": false,
 972      "options": {}
 973    },
 974    {
 975      "id": "kwaipilot/kat-coder-pro",
 976      "name": "Kwaipilot: KAT-Coder-Pro V1",
 977      "cost_per_1m_in": 0.207,
 978      "cost_per_1m_out": 0.828,
 979      "cost_per_1m_in_cached": 0,
 980      "cost_per_1m_out_cached": 0.0414,
 981      "context_window": 256000,
 982      "default_max_tokens": 64000,
 983      "can_reason": false,
 984      "supports_attachments": false,
 985      "options": {}
 986    },
 987    {
 988      "id": "kwaipilot/kat-coder-pro:free",
 989      "name": "Kwaipilot: KAT-Coder-Pro V1 (free)",
 990      "cost_per_1m_in": 0,
 991      "cost_per_1m_out": 0,
 992      "cost_per_1m_in_cached": 0,
 993      "cost_per_1m_out_cached": 0,
 994      "context_window": 256000,
 995      "default_max_tokens": 16000,
 996      "can_reason": false,
 997      "supports_attachments": false,
 998      "options": {}
 999    },
1000    {
1001      "id": "meta-llama/llama-3.1-70b-instruct",
1002      "name": "Meta: Llama 3.1 70B Instruct",
1003      "cost_per_1m_in": 0.4,
1004      "cost_per_1m_out": 0.4,
1005      "cost_per_1m_in_cached": 0,
1006      "cost_per_1m_out_cached": 0,
1007      "context_window": 131072,
1008      "default_max_tokens": 13107,
1009      "can_reason": false,
1010      "supports_attachments": false,
1011      "options": {}
1012    },
1013    {
1014      "id": "meta-llama/llama-3.1-8b-instruct",
1015      "name": "Meta: Llama 3.1 8B Instruct",
1016      "cost_per_1m_in": 0.02,
1017      "cost_per_1m_out": 0.06,
1018      "cost_per_1m_in_cached": 0,
1019      "cost_per_1m_out_cached": 0,
1020      "context_window": 131072,
1021      "default_max_tokens": 13107,
1022      "can_reason": false,
1023      "supports_attachments": false,
1024      "options": {}
1025    },
1026    {
1027      "id": "meta-llama/llama-3.2-3b-instruct",
1028      "name": "Meta: Llama 3.2 3B Instruct",
1029      "cost_per_1m_in": 0.03,
1030      "cost_per_1m_out": 0.05,
1031      "cost_per_1m_in_cached": 0,
1032      "cost_per_1m_out_cached": 0,
1033      "context_window": 32768,
1034      "default_max_tokens": 16000,
1035      "can_reason": false,
1036      "supports_attachments": false,
1037      "options": {}
1038    },
1039    {
1040      "id": "meta-llama/llama-3.3-70b-instruct",
1041      "name": "Meta: Llama 3.3 70B Instruct",
1042      "cost_per_1m_in": 0.25,
1043      "cost_per_1m_out": 0.75,
1044      "cost_per_1m_in_cached": 0,
1045      "cost_per_1m_out_cached": 0,
1046      "context_window": 131072,
1047      "default_max_tokens": 13107,
1048      "can_reason": false,
1049      "supports_attachments": false,
1050      "options": {}
1051    },
1052    {
1053      "id": "meta-llama/llama-3.3-70b-instruct:free",
1054      "name": "Meta: Llama 3.3 70B Instruct (free)",
1055      "cost_per_1m_in": 0,
1056      "cost_per_1m_out": 0,
1057      "cost_per_1m_in_cached": 0,
1058      "cost_per_1m_out_cached": 0,
1059      "context_window": 131072,
1060      "default_max_tokens": 13107,
1061      "can_reason": false,
1062      "supports_attachments": false,
1063      "options": {}
1064    },
1065    {
1066      "id": "meta-llama/llama-4-maverick",
1067      "name": "Meta: Llama 4 Maverick",
1068      "cost_per_1m_in": 0.27,
1069      "cost_per_1m_out": 0.85,
1070      "cost_per_1m_in_cached": 0,
1071      "cost_per_1m_out_cached": 0,
1072      "context_window": 1048576,
1073      "default_max_tokens": 104857,
1074      "can_reason": false,
1075      "supports_attachments": true,
1076      "options": {}
1077    },
1078    {
1079      "id": "meta-llama/llama-4-scout",
1080      "name": "Meta: Llama 4 Scout",
1081      "cost_per_1m_in": 0.25,
1082      "cost_per_1m_out": 0.7,
1083      "cost_per_1m_in_cached": 0,
1084      "cost_per_1m_out_cached": 0,
1085      "context_window": 1310720,
1086      "default_max_tokens": 4096,
1087      "can_reason": false,
1088      "supports_attachments": true,
1089      "options": {}
1090    },
1091    {
1092      "id": "microsoft/phi-3-medium-128k-instruct",
1093      "name": "Microsoft: Phi-3 Medium 128K Instruct",
1094      "cost_per_1m_in": 1,
1095      "cost_per_1m_out": 1,
1096      "cost_per_1m_in_cached": 0,
1097      "cost_per_1m_out_cached": 0,
1098      "context_window": 128000,
1099      "default_max_tokens": 12800,
1100      "can_reason": false,
1101      "supports_attachments": false,
1102      "options": {}
1103    },
1104    {
1105      "id": "microsoft/phi-3-mini-128k-instruct",
1106      "name": "Microsoft: Phi-3 Mini 128K Instruct",
1107      "cost_per_1m_in": 0.1,
1108      "cost_per_1m_out": 0.1,
1109      "cost_per_1m_in_cached": 0,
1110      "cost_per_1m_out_cached": 0,
1111      "context_window": 128000,
1112      "default_max_tokens": 12800,
1113      "can_reason": false,
1114      "supports_attachments": false,
1115      "options": {}
1116    },
1117    {
1118      "id": "microsoft/phi-3.5-mini-128k-instruct",
1119      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
1120      "cost_per_1m_in": 0.1,
1121      "cost_per_1m_out": 0.1,
1122      "cost_per_1m_in_cached": 0,
1123      "cost_per_1m_out_cached": 0,
1124      "context_window": 128000,
1125      "default_max_tokens": 12800,
1126      "can_reason": false,
1127      "supports_attachments": false,
1128      "options": {}
1129    },
1130    {
1131      "id": "minimax/minimax-m2",
1132      "name": "MiniMax: MiniMax M2",
1133      "cost_per_1m_in": 0.254,
1134      "cost_per_1m_out": 1.02,
1135      "cost_per_1m_in_cached": 0,
1136      "cost_per_1m_out_cached": 0.127,
1137      "context_window": 262144,
1138      "default_max_tokens": 26214,
1139      "can_reason": true,
1140      "reasoning_levels": [
1141        "low",
1142        "medium",
1143        "high"
1144      ],
1145      "default_reasoning_effort": "medium",
1146      "supports_attachments": false,
1147      "options": {}
1148    },
1149    {
1150      "id": "minimax/minimax-m2.1",
1151      "name": "MiniMax: MiniMax M2.1",
1152      "cost_per_1m_in": 0.3,
1153      "cost_per_1m_out": 1.2,
1154      "cost_per_1m_in_cached": 0.375,
1155      "cost_per_1m_out_cached": 0.03,
1156      "context_window": 204800,
1157      "default_max_tokens": 65536,
1158      "can_reason": true,
1159      "reasoning_levels": [
1160        "low",
1161        "medium",
1162        "high"
1163      ],
1164      "default_reasoning_effort": "medium",
1165      "supports_attachments": false,
1166      "options": {}
1167    },
1168    {
1169      "id": "mistralai/mistral-large",
1170      "name": "Mistral Large",
1171      "cost_per_1m_in": 2,
1172      "cost_per_1m_out": 6,
1173      "cost_per_1m_in_cached": 0,
1174      "cost_per_1m_out_cached": 0,
1175      "context_window": 128000,
1176      "default_max_tokens": 12800,
1177      "can_reason": false,
1178      "supports_attachments": false,
1179      "options": {}
1180    },
1181    {
1182      "id": "mistralai/mistral-large-2407",
1183      "name": "Mistral Large 2407",
1184      "cost_per_1m_in": 2,
1185      "cost_per_1m_out": 6,
1186      "cost_per_1m_in_cached": 0,
1187      "cost_per_1m_out_cached": 0,
1188      "context_window": 131072,
1189      "default_max_tokens": 13107,
1190      "can_reason": false,
1191      "supports_attachments": false,
1192      "options": {}
1193    },
1194    {
1195      "id": "mistralai/mistral-large-2411",
1196      "name": "Mistral Large 2411",
1197      "cost_per_1m_in": 2,
1198      "cost_per_1m_out": 6,
1199      "cost_per_1m_in_cached": 0,
1200      "cost_per_1m_out_cached": 0,
1201      "context_window": 131072,
1202      "default_max_tokens": 13107,
1203      "can_reason": false,
1204      "supports_attachments": false,
1205      "options": {}
1206    },
1207    {
1208      "id": "mistralai/mistral-tiny",
1209      "name": "Mistral Tiny",
1210      "cost_per_1m_in": 0.25,
1211      "cost_per_1m_out": 0.25,
1212      "cost_per_1m_in_cached": 0,
1213      "cost_per_1m_out_cached": 0,
1214      "context_window": 32768,
1215      "default_max_tokens": 3276,
1216      "can_reason": false,
1217      "supports_attachments": false,
1218      "options": {}
1219    },
1220    {
1221      "id": "mistralai/codestral-2508",
1222      "name": "Mistral: Codestral 2508",
1223      "cost_per_1m_in": 0.3,
1224      "cost_per_1m_out": 0.9,
1225      "cost_per_1m_in_cached": 0,
1226      "cost_per_1m_out_cached": 0,
1227      "context_window": 256000,
1228      "default_max_tokens": 25600,
1229      "can_reason": false,
1230      "supports_attachments": false,
1231      "options": {}
1232    },
1233    {
1234      "id": "mistralai/devstral-2512",
1235      "name": "Mistral: Devstral 2 2512",
1236      "cost_per_1m_in": 0.05,
1237      "cost_per_1m_out": 0.22,
1238      "cost_per_1m_in_cached": 0,
1239      "cost_per_1m_out_cached": 0,
1240      "context_window": 262144,
1241      "default_max_tokens": 32768,
1242      "can_reason": false,
1243      "supports_attachments": false,
1244      "options": {}
1245    },
1246    {
1247      "id": "mistralai/devstral-2512:free",
1248      "name": "Mistral: Devstral 2 2512 (free)",
1249      "cost_per_1m_in": 0,
1250      "cost_per_1m_out": 0,
1251      "cost_per_1m_in_cached": 0,
1252      "cost_per_1m_out_cached": 0,
1253      "context_window": 262144,
1254      "default_max_tokens": 26214,
1255      "can_reason": false,
1256      "supports_attachments": false,
1257      "options": {}
1258    },
1259    {
1260      "id": "mistralai/devstral-medium",
1261      "name": "Mistral: Devstral Medium",
1262      "cost_per_1m_in": 0.4,
1263      "cost_per_1m_out": 2,
1264      "cost_per_1m_in_cached": 0,
1265      "cost_per_1m_out_cached": 0,
1266      "context_window": 131072,
1267      "default_max_tokens": 13107,
1268      "can_reason": false,
1269      "supports_attachments": false,
1270      "options": {}
1271    },
1272    {
1273      "id": "mistralai/devstral-small",
1274      "name": "Mistral: Devstral Small 1.1",
1275      "cost_per_1m_in": 0.1,
1276      "cost_per_1m_out": 0.3,
1277      "cost_per_1m_in_cached": 0,
1278      "cost_per_1m_out_cached": 0,
1279      "context_window": 131072,
1280      "default_max_tokens": 13107,
1281      "can_reason": false,
1282      "supports_attachments": false,
1283      "options": {}
1284    },
1285    {
1286      "id": "mistralai/ministral-14b-2512",
1287      "name": "Mistral: Ministral 3 14B 2512",
1288      "cost_per_1m_in": 0.2,
1289      "cost_per_1m_out": 0.2,
1290      "cost_per_1m_in_cached": 0,
1291      "cost_per_1m_out_cached": 0,
1292      "context_window": 262144,
1293      "default_max_tokens": 26214,
1294      "can_reason": false,
1295      "supports_attachments": true,
1296      "options": {}
1297    },
1298    {
1299      "id": "mistralai/ministral-3b-2512",
1300      "name": "Mistral: Ministral 3 3B 2512",
1301      "cost_per_1m_in": 0.1,
1302      "cost_per_1m_out": 0.1,
1303      "cost_per_1m_in_cached": 0,
1304      "cost_per_1m_out_cached": 0,
1305      "context_window": 131072,
1306      "default_max_tokens": 13107,
1307      "can_reason": false,
1308      "supports_attachments": true,
1309      "options": {}
1310    },
1311    {
1312      "id": "mistralai/ministral-8b-2512",
1313      "name": "Mistral: Ministral 3 8B 2512",
1314      "cost_per_1m_in": 0.15,
1315      "cost_per_1m_out": 0.15,
1316      "cost_per_1m_in_cached": 0,
1317      "cost_per_1m_out_cached": 0,
1318      "context_window": 262144,
1319      "default_max_tokens": 26214,
1320      "can_reason": false,
1321      "supports_attachments": true,
1322      "options": {}
1323    },
1324    {
1325      "id": "mistralai/ministral-3b",
1326      "name": "Mistral: Ministral 3B",
1327      "cost_per_1m_in": 0.04,
1328      "cost_per_1m_out": 0.04,
1329      "cost_per_1m_in_cached": 0,
1330      "cost_per_1m_out_cached": 0,
1331      "context_window": 131072,
1332      "default_max_tokens": 13107,
1333      "can_reason": false,
1334      "supports_attachments": false,
1335      "options": {}
1336    },
1337    {
1338      "id": "mistralai/ministral-8b",
1339      "name": "Mistral: Ministral 8B",
1340      "cost_per_1m_in": 0.1,
1341      "cost_per_1m_out": 0.1,
1342      "cost_per_1m_in_cached": 0,
1343      "cost_per_1m_out_cached": 0,
1344      "context_window": 131072,
1345      "default_max_tokens": 13107,
1346      "can_reason": false,
1347      "supports_attachments": false,
1348      "options": {}
1349    },
1350    {
1351      "id": "mistralai/mistral-7b-instruct",
1352      "name": "Mistral: Mistral 7B Instruct",
1353      "cost_per_1m_in": 0.028,
1354      "cost_per_1m_out": 0.054,
1355      "cost_per_1m_in_cached": 0,
1356      "cost_per_1m_out_cached": 0,
1357      "context_window": 32768,
1358      "default_max_tokens": 8192,
1359      "can_reason": false,
1360      "supports_attachments": false,
1361      "options": {}
1362    },
1363    {
1364      "id": "mistralai/mistral-7b-instruct:free",
1365      "name": "Mistral: Mistral 7B Instruct (free)",
1366      "cost_per_1m_in": 0,
1367      "cost_per_1m_out": 0,
1368      "cost_per_1m_in_cached": 0,
1369      "cost_per_1m_out_cached": 0,
1370      "context_window": 32768,
1371      "default_max_tokens": 8192,
1372      "can_reason": false,
1373      "supports_attachments": false,
1374      "options": {}
1375    },
1376    {
1377      "id": "mistralai/mistral-large-2512",
1378      "name": "Mistral: Mistral Large 3 2512",
1379      "cost_per_1m_in": 0.5,
1380      "cost_per_1m_out": 1.5,
1381      "cost_per_1m_in_cached": 0,
1382      "cost_per_1m_out_cached": 0,
1383      "context_window": 262144,
1384      "default_max_tokens": 26214,
1385      "can_reason": false,
1386      "supports_attachments": true,
1387      "options": {}
1388    },
1389    {
1390      "id": "mistralai/mistral-medium-3",
1391      "name": "Mistral: Mistral Medium 3",
1392      "cost_per_1m_in": 0.4,
1393      "cost_per_1m_out": 2,
1394      "cost_per_1m_in_cached": 0,
1395      "cost_per_1m_out_cached": 0,
1396      "context_window": 131072,
1397      "default_max_tokens": 13107,
1398      "can_reason": false,
1399      "supports_attachments": true,
1400      "options": {}
1401    },
1402    {
1403      "id": "mistralai/mistral-medium-3.1",
1404      "name": "Mistral: Mistral Medium 3.1",
1405      "cost_per_1m_in": 0.4,
1406      "cost_per_1m_out": 2,
1407      "cost_per_1m_in_cached": 0,
1408      "cost_per_1m_out_cached": 0,
1409      "context_window": 131072,
1410      "default_max_tokens": 13107,
1411      "can_reason": false,
1412      "supports_attachments": true,
1413      "options": {}
1414    },
1415    {
1416      "id": "mistralai/mistral-nemo",
1417      "name": "Mistral: Mistral Nemo",
1418      "cost_per_1m_in": 0.15,
1419      "cost_per_1m_out": 0.15,
1420      "cost_per_1m_in_cached": 0,
1421      "cost_per_1m_out_cached": 0,
1422      "context_window": 131072,
1423      "default_max_tokens": 13107,
1424      "can_reason": false,
1425      "supports_attachments": false,
1426      "options": {}
1427    },
1428    {
1429      "id": "mistralai/mistral-small-24b-instruct-2501",
1430      "name": "Mistral: Mistral Small 3",
1431      "cost_per_1m_in": 0.03,
1432      "cost_per_1m_out": 0.11,
1433      "cost_per_1m_in_cached": 0,
1434      "cost_per_1m_out_cached": 0,
1435      "context_window": 32768,
1436      "default_max_tokens": 16384,
1437      "can_reason": false,
1438      "supports_attachments": false,
1439      "options": {}
1440    },
1441    {
1442      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
1443      "name": "Mistral: Mistral Small 3.1 24B (free)",
1444      "cost_per_1m_in": 0,
1445      "cost_per_1m_out": 0,
1446      "cost_per_1m_in_cached": 0,
1447      "cost_per_1m_out_cached": 0,
1448      "context_window": 128000,
1449      "default_max_tokens": 12800,
1450      "can_reason": false,
1451      "supports_attachments": true,
1452      "options": {}
1453    },
1454    {
1455      "id": "mistralai/mistral-small-3.2-24b-instruct",
1456      "name": "Mistral: Mistral Small 3.2 24B",
1457      "cost_per_1m_in": 0.1,
1458      "cost_per_1m_out": 0.3,
1459      "cost_per_1m_in_cached": 0,
1460      "cost_per_1m_out_cached": 0,
1461      "context_window": 131072,
1462      "default_max_tokens": 13107,
1463      "can_reason": false,
1464      "supports_attachments": true,
1465      "options": {}
1466    },
1467    {
1468      "id": "mistralai/mistral-small-creative",
1469      "name": "Mistral: Mistral Small Creative",
1470      "cost_per_1m_in": 0.1,
1471      "cost_per_1m_out": 0.3,
1472      "cost_per_1m_in_cached": 0,
1473      "cost_per_1m_out_cached": 0,
1474      "context_window": 32768,
1475      "default_max_tokens": 3276,
1476      "can_reason": false,
1477      "supports_attachments": false,
1478      "options": {}
1479    },
1480    {
1481      "id": "mistralai/mixtral-8x22b-instruct",
1482      "name": "Mistral: Mixtral 8x22B Instruct",
1483      "cost_per_1m_in": 2,
1484      "cost_per_1m_out": 6,
1485      "cost_per_1m_in_cached": 0,
1486      "cost_per_1m_out_cached": 0,
1487      "context_window": 65536,
1488      "default_max_tokens": 6553,
1489      "can_reason": false,
1490      "supports_attachments": false,
1491      "options": {}
1492    },
1493    {
1494      "id": "mistralai/mixtral-8x7b-instruct",
1495      "name": "Mistral: Mixtral 8x7B Instruct",
1496      "cost_per_1m_in": 0.54,
1497      "cost_per_1m_out": 0.54,
1498      "cost_per_1m_in_cached": 0,
1499      "cost_per_1m_out_cached": 0,
1500      "context_window": 32768,
1501      "default_max_tokens": 8192,
1502      "can_reason": false,
1503      "supports_attachments": false,
1504      "options": {}
1505    },
1506    {
1507      "id": "mistralai/pixtral-large-2411",
1508      "name": "Mistral: Pixtral Large 2411",
1509      "cost_per_1m_in": 2,
1510      "cost_per_1m_out": 6,
1511      "cost_per_1m_in_cached": 0,
1512      "cost_per_1m_out_cached": 0,
1513      "context_window": 131072,
1514      "default_max_tokens": 13107,
1515      "can_reason": false,
1516      "supports_attachments": true,
1517      "options": {}
1518    },
1519    {
1520      "id": "mistralai/mistral-saba",
1521      "name": "Mistral: Saba",
1522      "cost_per_1m_in": 0.2,
1523      "cost_per_1m_out": 0.6,
1524      "cost_per_1m_in_cached": 0,
1525      "cost_per_1m_out_cached": 0,
1526      "context_window": 32768,
1527      "default_max_tokens": 3276,
1528      "can_reason": false,
1529      "supports_attachments": false,
1530      "options": {}
1531    },
1532    {
1533      "id": "mistralai/voxtral-small-24b-2507",
1534      "name": "Mistral: Voxtral Small 24B 2507",
1535      "cost_per_1m_in": 0.1,
1536      "cost_per_1m_out": 0.3,
1537      "cost_per_1m_in_cached": 0,
1538      "cost_per_1m_out_cached": 0,
1539      "context_window": 32000,
1540      "default_max_tokens": 3200,
1541      "can_reason": false,
1542      "supports_attachments": false,
1543      "options": {}
1544    },
1545    {
1546      "id": "moonshotai/kimi-k2",
1547      "name": "MoonshotAI: Kimi K2 0711",
1548      "cost_per_1m_in": 0.5,
1549      "cost_per_1m_out": 2.4,
1550      "cost_per_1m_in_cached": 0,
1551      "cost_per_1m_out_cached": 0,
1552      "context_window": 131072,
1553      "default_max_tokens": 13107,
1554      "can_reason": false,
1555      "supports_attachments": false,
1556      "options": {}
1557    },
1558    {
1559      "id": "moonshotai/kimi-k2-0905",
1560      "name": "MoonshotAI: Kimi K2 0905",
1561      "cost_per_1m_in": 0.6,
1562      "cost_per_1m_out": 2.5,
1563      "cost_per_1m_in_cached": 0,
1564      "cost_per_1m_out_cached": 0,
1565      "context_window": 262144,
1566      "default_max_tokens": 26214,
1567      "can_reason": false,
1568      "supports_attachments": false,
1569      "options": {}
1570    },
1571    {
1572      "id": "moonshotai/kimi-k2-0905:exacto",
1573      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
1574      "cost_per_1m_in": 0.6,
1575      "cost_per_1m_out": 2.5,
1576      "cost_per_1m_in_cached": 0,
1577      "cost_per_1m_out_cached": 0,
1578      "context_window": 262144,
1579      "default_max_tokens": 26214,
1580      "can_reason": false,
1581      "supports_attachments": false,
1582      "options": {}
1583    },
1584    {
1585      "id": "moonshotai/kimi-k2-thinking",
1586      "name": "MoonshotAI: Kimi K2 Thinking",
1587      "cost_per_1m_in": 0.6,
1588      "cost_per_1m_out": 2.5,
1589      "cost_per_1m_in_cached": 0,
1590      "cost_per_1m_out_cached": 0,
1591      "context_window": 262144,
1592      "default_max_tokens": 131072,
1593      "can_reason": true,
1594      "reasoning_levels": [
1595        "low",
1596        "medium",
1597        "high"
1598      ],
1599      "default_reasoning_effort": "medium",
1600      "supports_attachments": false,
1601      "options": {}
1602    },
1603    {
1604      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1605      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1606      "cost_per_1m_in": 1.2,
1607      "cost_per_1m_out": 1.2,
1608      "cost_per_1m_in_cached": 0,
1609      "cost_per_1m_out_cached": 0,
1610      "context_window": 131072,
1611      "default_max_tokens": 8192,
1612      "can_reason": false,
1613      "supports_attachments": false,
1614      "options": {}
1615    },
1616    {
1617      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1618      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1619      "cost_per_1m_in": 0.1,
1620      "cost_per_1m_out": 0.4,
1621      "cost_per_1m_in_cached": 0,
1622      "cost_per_1m_out_cached": 0,
1623      "context_window": 131072,
1624      "default_max_tokens": 13107,
1625      "can_reason": true,
1626      "reasoning_levels": [
1627        "low",
1628        "medium",
1629        "high"
1630      ],
1631      "default_reasoning_effort": "medium",
1632      "supports_attachments": false,
1633      "options": {}
1634    },
1635    {
1636      "id": "nvidia/nemotron-3-nano-30b-a3b",
1637      "name": "NVIDIA: Nemotron 3 Nano 30B A3B",
1638      "cost_per_1m_in": 0.06,
1639      "cost_per_1m_out": 0.24,
1640      "cost_per_1m_in_cached": 0,
1641      "cost_per_1m_out_cached": 0,
1642      "context_window": 262144,
1643      "default_max_tokens": 131072,
1644      "can_reason": true,
1645      "reasoning_levels": [
1646        "low",
1647        "medium",
1648        "high"
1649      ],
1650      "default_reasoning_effort": "medium",
1651      "supports_attachments": false,
1652      "options": {}
1653    },
1654    {
1655      "id": "nvidia/nemotron-3-nano-30b-a3b:free",
1656      "name": "NVIDIA: Nemotron 3 Nano 30B A3B (free)",
1657      "cost_per_1m_in": 0,
1658      "cost_per_1m_out": 0,
1659      "cost_per_1m_in_cached": 0,
1660      "cost_per_1m_out_cached": 0,
1661      "context_window": 256000,
1662      "default_max_tokens": 25600,
1663      "can_reason": true,
1664      "reasoning_levels": [
1665        "low",
1666        "medium",
1667        "high"
1668      ],
1669      "default_reasoning_effort": "medium",
1670      "supports_attachments": false,
1671      "options": {}
1672    },
1673    {
1674      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1675      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1676      "cost_per_1m_in": 0,
1677      "cost_per_1m_out": 0,
1678      "cost_per_1m_in_cached": 0,
1679      "cost_per_1m_out_cached": 0,
1680      "context_window": 128000,
1681      "default_max_tokens": 64000,
1682      "can_reason": true,
1683      "reasoning_levels": [
1684        "low",
1685        "medium",
1686        "high"
1687      ],
1688      "default_reasoning_effort": "medium",
1689      "supports_attachments": true,
1690      "options": {}
1691    },
1692    {
1693      "id": "nvidia/nemotron-nano-9b-v2",
1694      "name": "NVIDIA: Nemotron Nano 9B V2",
1695      "cost_per_1m_in": 0.04,
1696      "cost_per_1m_out": 0.16,
1697      "cost_per_1m_in_cached": 0,
1698      "cost_per_1m_out_cached": 0,
1699      "context_window": 131072,
1700      "default_max_tokens": 13107,
1701      "can_reason": true,
1702      "reasoning_levels": [
1703        "low",
1704        "medium",
1705        "high"
1706      ],
1707      "default_reasoning_effort": "medium",
1708      "supports_attachments": false,
1709      "options": {}
1710    },
1711    {
1712      "id": "nvidia/nemotron-nano-9b-v2:free",
1713      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1714      "cost_per_1m_in": 0,
1715      "cost_per_1m_out": 0,
1716      "cost_per_1m_in_cached": 0,
1717      "cost_per_1m_out_cached": 0,
1718      "context_window": 128000,
1719      "default_max_tokens": 12800,
1720      "can_reason": true,
1721      "reasoning_levels": [
1722        "low",
1723        "medium",
1724        "high"
1725      ],
1726      "default_reasoning_effort": "medium",
1727      "supports_attachments": false,
1728      "options": {}
1729    },
1730    {
1731      "id": "nex-agi/deepseek-v3.1-nex-n1:free",
1732      "name": "Nex AGI: DeepSeek V3.1 Nex N1 (free)",
1733      "cost_per_1m_in": 0,
1734      "cost_per_1m_out": 0,
1735      "cost_per_1m_in_cached": 0,
1736      "cost_per_1m_out_cached": 0,
1737      "context_window": 131072,
1738      "default_max_tokens": 81920,
1739      "can_reason": false,
1740      "supports_attachments": false,
1741      "options": {}
1742    },
1743    {
1744      "id": "nousresearch/deephermes-3-mistral-24b-preview",
1745      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
1746      "cost_per_1m_in": 0.02,
1747      "cost_per_1m_out": 0.09999999999999999,
1748      "cost_per_1m_in_cached": 0,
1749      "cost_per_1m_out_cached": 0,
1750      "context_window": 32768,
1751      "default_max_tokens": 16384,
1752      "can_reason": true,
1753      "reasoning_levels": [
1754        "low",
1755        "medium",
1756        "high"
1757      ],
1758      "default_reasoning_effort": "medium",
1759      "supports_attachments": false,
1760      "options": {}
1761    },
1762    {
1763      "id": "openai/codex-mini",
1764      "name": "OpenAI: Codex Mini",
1765      "cost_per_1m_in": 1.5,
1766      "cost_per_1m_out": 6,
1767      "cost_per_1m_in_cached": 0,
1768      "cost_per_1m_out_cached": 0.375,
1769      "context_window": 200000,
1770      "default_max_tokens": 50000,
1771      "can_reason": true,
1772      "reasoning_levels": [
1773        "low",
1774        "medium",
1775        "high"
1776      ],
1777      "default_reasoning_effort": "medium",
1778      "supports_attachments": true,
1779      "options": {}
1780    },
1781    {
1782      "id": "openai/gpt-4-turbo",
1783      "name": "OpenAI: GPT-4 Turbo",
1784      "cost_per_1m_in": 10,
1785      "cost_per_1m_out": 30,
1786      "cost_per_1m_in_cached": 0,
1787      "cost_per_1m_out_cached": 0,
1788      "context_window": 128000,
1789      "default_max_tokens": 2048,
1790      "can_reason": false,
1791      "supports_attachments": true,
1792      "options": {}
1793    },
1794    {
1795      "id": "openai/gpt-4-1106-preview",
1796      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1797      "cost_per_1m_in": 10,
1798      "cost_per_1m_out": 30,
1799      "cost_per_1m_in_cached": 0,
1800      "cost_per_1m_out_cached": 0,
1801      "context_window": 128000,
1802      "default_max_tokens": 2048,
1803      "can_reason": false,
1804      "supports_attachments": false,
1805      "options": {}
1806    },
1807    {
1808      "id": "openai/gpt-4-turbo-preview",
1809      "name": "OpenAI: GPT-4 Turbo Preview",
1810      "cost_per_1m_in": 10,
1811      "cost_per_1m_out": 30,
1812      "cost_per_1m_in_cached": 0,
1813      "cost_per_1m_out_cached": 0,
1814      "context_window": 128000,
1815      "default_max_tokens": 2048,
1816      "can_reason": false,
1817      "supports_attachments": false,
1818      "options": {}
1819    },
1820    {
1821      "id": "openai/gpt-4.1",
1822      "name": "OpenAI: GPT-4.1",
1823      "cost_per_1m_in": 2,
1824      "cost_per_1m_out": 8,
1825      "cost_per_1m_in_cached": 0,
1826      "cost_per_1m_out_cached": 0.5,
1827      "context_window": 1047576,
1828      "default_max_tokens": 104757,
1829      "can_reason": false,
1830      "supports_attachments": true,
1831      "options": {}
1832    },
1833    {
1834      "id": "openai/gpt-4.1-mini",
1835      "name": "OpenAI: GPT-4.1 Mini",
1836      "cost_per_1m_in": 0.39999999999999997,
1837      "cost_per_1m_out": 1.5999999999999999,
1838      "cost_per_1m_in_cached": 0,
1839      "cost_per_1m_out_cached": 0.09999999999999999,
1840      "context_window": 1047576,
1841      "default_max_tokens": 104757,
1842      "can_reason": false,
1843      "supports_attachments": true,
1844      "options": {}
1845    },
1846    {
1847      "id": "openai/gpt-4.1-nano",
1848      "name": "OpenAI: GPT-4.1 Nano",
1849      "cost_per_1m_in": 0.09999999999999999,
1850      "cost_per_1m_out": 0.39999999999999997,
1851      "cost_per_1m_in_cached": 0,
1852      "cost_per_1m_out_cached": 0.03,
1853      "context_window": 1047576,
1854      "default_max_tokens": 104757,
1855      "can_reason": false,
1856      "supports_attachments": true,
1857      "options": {}
1858    },
1859    {
1860      "id": "openai/gpt-4o",
1861      "name": "OpenAI: GPT-4o",
1862      "cost_per_1m_in": 2.5,
1863      "cost_per_1m_out": 10,
1864      "cost_per_1m_in_cached": 0,
1865      "cost_per_1m_out_cached": 0,
1866      "context_window": 128000,
1867      "default_max_tokens": 8192,
1868      "can_reason": false,
1869      "supports_attachments": true,
1870      "options": {}
1871    },
1872    {
1873      "id": "openai/gpt-4o-2024-05-13",
1874      "name": "OpenAI: GPT-4o (2024-05-13)",
1875      "cost_per_1m_in": 5,
1876      "cost_per_1m_out": 15,
1877      "cost_per_1m_in_cached": 0,
1878      "cost_per_1m_out_cached": 0,
1879      "context_window": 128000,
1880      "default_max_tokens": 2048,
1881      "can_reason": false,
1882      "supports_attachments": true,
1883      "options": {}
1884    },
1885    {
1886      "id": "openai/gpt-4o-2024-08-06",
1887      "name": "OpenAI: GPT-4o (2024-08-06)",
1888      "cost_per_1m_in": 2.5,
1889      "cost_per_1m_out": 10,
1890      "cost_per_1m_in_cached": 0,
1891      "cost_per_1m_out_cached": 1.25,
1892      "context_window": 128000,
1893      "default_max_tokens": 8192,
1894      "can_reason": false,
1895      "supports_attachments": true,
1896      "options": {}
1897    },
1898    {
1899      "id": "openai/gpt-4o-2024-11-20",
1900      "name": "OpenAI: GPT-4o (2024-11-20)",
1901      "cost_per_1m_in": 2.5,
1902      "cost_per_1m_out": 10,
1903      "cost_per_1m_in_cached": 0,
1904      "cost_per_1m_out_cached": 1.25,
1905      "context_window": 128000,
1906      "default_max_tokens": 8192,
1907      "can_reason": false,
1908      "supports_attachments": true,
1909      "options": {}
1910    },
1911    {
1912      "id": "openai/gpt-4o:extended",
1913      "name": "OpenAI: GPT-4o (extended)",
1914      "cost_per_1m_in": 6,
1915      "cost_per_1m_out": 18,
1916      "cost_per_1m_in_cached": 0,
1917      "cost_per_1m_out_cached": 0,
1918      "context_window": 128000,
1919      "default_max_tokens": 32000,
1920      "can_reason": false,
1921      "supports_attachments": true,
1922      "options": {}
1923    },
1924    {
1925      "id": "openai/gpt-4o-audio-preview",
1926      "name": "OpenAI: GPT-4o Audio",
1927      "cost_per_1m_in": 2.5,
1928      "cost_per_1m_out": 10,
1929      "cost_per_1m_in_cached": 0,
1930      "cost_per_1m_out_cached": 0,
1931      "context_window": 128000,
1932      "default_max_tokens": 8192,
1933      "can_reason": false,
1934      "supports_attachments": false,
1935      "options": {}
1936    },
1937    {
1938      "id": "openai/gpt-4o-mini",
1939      "name": "OpenAI: GPT-4o-mini",
1940      "cost_per_1m_in": 0.15,
1941      "cost_per_1m_out": 0.6,
1942      "cost_per_1m_in_cached": 0,
1943      "cost_per_1m_out_cached": 0.075,
1944      "context_window": 128000,
1945      "default_max_tokens": 8192,
1946      "can_reason": false,
1947      "supports_attachments": true,
1948      "options": {}
1949    },
1950    {
1951      "id": "openai/gpt-4o-mini-2024-07-18",
1952      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1953      "cost_per_1m_in": 0.15,
1954      "cost_per_1m_out": 0.6,
1955      "cost_per_1m_in_cached": 0,
1956      "cost_per_1m_out_cached": 0.075,
1957      "context_window": 128000,
1958      "default_max_tokens": 8192,
1959      "can_reason": false,
1960      "supports_attachments": true,
1961      "options": {}
1962    },
1963    {
1964      "id": "openai/gpt-5",
1965      "name": "OpenAI: GPT-5",
1966      "cost_per_1m_in": 1.25,
1967      "cost_per_1m_out": 10,
1968      "cost_per_1m_in_cached": 0,
1969      "cost_per_1m_out_cached": 0.125,
1970      "context_window": 400000,
1971      "default_max_tokens": 64000,
1972      "can_reason": true,
1973      "reasoning_levels": [
1974        "low",
1975        "medium",
1976        "high"
1977      ],
1978      "default_reasoning_effort": "medium",
1979      "supports_attachments": true,
1980      "options": {}
1981    },
1982    {
1983      "id": "openai/gpt-5-codex",
1984      "name": "OpenAI: GPT-5 Codex",
1985      "cost_per_1m_in": 1.25,
1986      "cost_per_1m_out": 10,
1987      "cost_per_1m_in_cached": 0,
1988      "cost_per_1m_out_cached": 0.125,
1989      "context_window": 400000,
1990      "default_max_tokens": 64000,
1991      "can_reason": true,
1992      "reasoning_levels": [
1993        "low",
1994        "medium",
1995        "high"
1996      ],
1997      "default_reasoning_effort": "medium",
1998      "supports_attachments": true,
1999      "options": {}
2000    },
2001    {
2002      "id": "openai/gpt-5-image",
2003      "name": "OpenAI: GPT-5 Image",
2004      "cost_per_1m_in": 10,
2005      "cost_per_1m_out": 10,
2006      "cost_per_1m_in_cached": 0,
2007      "cost_per_1m_out_cached": 1.25,
2008      "context_window": 400000,
2009      "default_max_tokens": 64000,
2010      "can_reason": true,
2011      "reasoning_levels": [
2012        "low",
2013        "medium",
2014        "high"
2015      ],
2016      "default_reasoning_effort": "medium",
2017      "supports_attachments": true,
2018      "options": {}
2019    },
2020    {
2021      "id": "openai/gpt-5-image-mini",
2022      "name": "OpenAI: GPT-5 Image Mini",
2023      "cost_per_1m_in": 2.5,
2024      "cost_per_1m_out": 2,
2025      "cost_per_1m_in_cached": 0,
2026      "cost_per_1m_out_cached": 0.25,
2027      "context_window": 400000,
2028      "default_max_tokens": 64000,
2029      "can_reason": true,
2030      "reasoning_levels": [
2031        "low",
2032        "medium",
2033        "high"
2034      ],
2035      "default_reasoning_effort": "medium",
2036      "supports_attachments": true,
2037      "options": {}
2038    },
2039    {
2040      "id": "openai/gpt-5-mini",
2041      "name": "OpenAI: GPT-5 Mini",
2042      "cost_per_1m_in": 0.25,
2043      "cost_per_1m_out": 2,
2044      "cost_per_1m_in_cached": 0,
2045      "cost_per_1m_out_cached": 0.03,
2046      "context_window": 400000,
2047      "default_max_tokens": 40000,
2048      "can_reason": true,
2049      "reasoning_levels": [
2050        "low",
2051        "medium",
2052        "high"
2053      ],
2054      "default_reasoning_effort": "medium",
2055      "supports_attachments": true,
2056      "options": {}
2057    },
2058    {
2059      "id": "openai/gpt-5-nano",
2060      "name": "OpenAI: GPT-5 Nano",
2061      "cost_per_1m_in": 0.049999999999999996,
2062      "cost_per_1m_out": 0.39999999999999997,
2063      "cost_per_1m_in_cached": 0,
2064      "cost_per_1m_out_cached": 0.01,
2065      "context_window": 400000,
2066      "default_max_tokens": 40000,
2067      "can_reason": true,
2068      "reasoning_levels": [
2069        "low",
2070        "medium",
2071        "high"
2072      ],
2073      "default_reasoning_effort": "medium",
2074      "supports_attachments": true,
2075      "options": {}
2076    },
2077    {
2078      "id": "openai/gpt-5-pro",
2079      "name": "OpenAI: GPT-5 Pro",
2080      "cost_per_1m_in": 15,
2081      "cost_per_1m_out": 120,
2082      "cost_per_1m_in_cached": 0,
2083      "cost_per_1m_out_cached": 0,
2084      "context_window": 400000,
2085      "default_max_tokens": 64000,
2086      "can_reason": true,
2087      "reasoning_levels": [
2088        "low",
2089        "medium",
2090        "high"
2091      ],
2092      "default_reasoning_effort": "medium",
2093      "supports_attachments": true,
2094      "options": {}
2095    },
2096    {
2097      "id": "openai/gpt-5.1",
2098      "name": "OpenAI: GPT-5.1",
2099      "cost_per_1m_in": 1.25,
2100      "cost_per_1m_out": 10,
2101      "cost_per_1m_in_cached": 0,
2102      "cost_per_1m_out_cached": 0.125,
2103      "context_window": 400000,
2104      "default_max_tokens": 64000,
2105      "can_reason": true,
2106      "reasoning_levels": [
2107        "low",
2108        "medium",
2109        "high"
2110      ],
2111      "default_reasoning_effort": "medium",
2112      "supports_attachments": true,
2113      "options": {}
2114    },
2115    {
2116      "id": "openai/gpt-5.1-chat",
2117      "name": "OpenAI: GPT-5.1 Chat",
2118      "cost_per_1m_in": 1.25,
2119      "cost_per_1m_out": 10,
2120      "cost_per_1m_in_cached": 0,
2121      "cost_per_1m_out_cached": 0.125,
2122      "context_window": 128000,
2123      "default_max_tokens": 8192,
2124      "can_reason": false,
2125      "supports_attachments": true,
2126      "options": {}
2127    },
2128    {
2129      "id": "openai/gpt-5.1-codex",
2130      "name": "OpenAI: GPT-5.1-Codex",
2131      "cost_per_1m_in": 1.25,
2132      "cost_per_1m_out": 10,
2133      "cost_per_1m_in_cached": 0,
2134      "cost_per_1m_out_cached": 0.125,
2135      "context_window": 400000,
2136      "default_max_tokens": 64000,
2137      "can_reason": true,
2138      "reasoning_levels": [
2139        "low",
2140        "medium",
2141        "high"
2142      ],
2143      "default_reasoning_effort": "medium",
2144      "supports_attachments": true,
2145      "options": {}
2146    },
2147    {
2148      "id": "openai/gpt-5.1-codex-max",
2149      "name": "OpenAI: GPT-5.1-Codex-Max",
2150      "cost_per_1m_in": 1.25,
2151      "cost_per_1m_out": 10,
2152      "cost_per_1m_in_cached": 0,
2153      "cost_per_1m_out_cached": 0.125,
2154      "context_window": 400000,
2155      "default_max_tokens": 64000,
2156      "can_reason": true,
2157      "reasoning_levels": [
2158        "low",
2159        "medium",
2160        "high"
2161      ],
2162      "default_reasoning_effort": "medium",
2163      "supports_attachments": true,
2164      "options": {}
2165    },
2166    {
2167      "id": "openai/gpt-5.1-codex-mini",
2168      "name": "OpenAI: GPT-5.1-Codex-Mini",
2169      "cost_per_1m_in": 0.25,
2170      "cost_per_1m_out": 2,
2171      "cost_per_1m_in_cached": 0,
2172      "cost_per_1m_out_cached": 0.024999999999999998,
2173      "context_window": 400000,
2174      "default_max_tokens": 50000,
2175      "can_reason": true,
2176      "reasoning_levels": [
2177        "low",
2178        "medium",
2179        "high"
2180      ],
2181      "default_reasoning_effort": "medium",
2182      "supports_attachments": true,
2183      "options": {}
2184    },
2185    {
2186      "id": "openai/gpt-5.2",
2187      "name": "OpenAI: GPT-5.2",
2188      "cost_per_1m_in": 1.75,
2189      "cost_per_1m_out": 14,
2190      "cost_per_1m_in_cached": 0,
2191      "cost_per_1m_out_cached": 0.175,
2192      "context_window": 400000,
2193      "default_max_tokens": 64000,
2194      "can_reason": true,
2195      "reasoning_levels": [
2196        "low",
2197        "medium",
2198        "high"
2199      ],
2200      "default_reasoning_effort": "medium",
2201      "supports_attachments": true,
2202      "options": {}
2203    },
2204    {
2205      "id": "openai/gpt-5.2-chat",
2206      "name": "OpenAI: GPT-5.2 Chat",
2207      "cost_per_1m_in": 1.75,
2208      "cost_per_1m_out": 14,
2209      "cost_per_1m_in_cached": 0,
2210      "cost_per_1m_out_cached": 0.175,
2211      "context_window": 128000,
2212      "default_max_tokens": 8192,
2213      "can_reason": false,
2214      "supports_attachments": true,
2215      "options": {}
2216    },
2217    {
2218      "id": "openai/gpt-5.2-pro",
2219      "name": "OpenAI: GPT-5.2 Pro",
2220      "cost_per_1m_in": 21,
2221      "cost_per_1m_out": 168,
2222      "cost_per_1m_in_cached": 0,
2223      "cost_per_1m_out_cached": 0,
2224      "context_window": 400000,
2225      "default_max_tokens": 64000,
2226      "can_reason": true,
2227      "reasoning_levels": [
2228        "low",
2229        "medium",
2230        "high"
2231      ],
2232      "default_reasoning_effort": "medium",
2233      "supports_attachments": true,
2234      "options": {}
2235    },
2236    {
2237      "id": "openai/gpt-oss-120b",
2238      "name": "OpenAI: gpt-oss-120b",
2239      "cost_per_1m_in": 0.039,
2240      "cost_per_1m_out": 0.19,
2241      "cost_per_1m_in_cached": 0,
2242      "cost_per_1m_out_cached": 0,
2243      "context_window": 131072,
2244      "default_max_tokens": 13107,
2245      "can_reason": true,
2246      "reasoning_levels": [
2247        "low",
2248        "medium",
2249        "high"
2250      ],
2251      "default_reasoning_effort": "medium",
2252      "supports_attachments": false,
2253      "options": {}
2254    },
2255    {
2256      "id": "openai/gpt-oss-120b:exacto",
2257      "name": "OpenAI: gpt-oss-120b (exacto)",
2258      "cost_per_1m_in": 0.039,
2259      "cost_per_1m_out": 0.19,
2260      "cost_per_1m_in_cached": 0,
2261      "cost_per_1m_out_cached": 0,
2262      "context_window": 131072,
2263      "default_max_tokens": 13107,
2264      "can_reason": true,
2265      "reasoning_levels": [
2266        "low",
2267        "medium",
2268        "high"
2269      ],
2270      "default_reasoning_effort": "medium",
2271      "supports_attachments": false,
2272      "options": {}
2273    },
2274    {
2275      "id": "openai/gpt-oss-120b:free",
2276      "name": "OpenAI: gpt-oss-120b (free)",
2277      "cost_per_1m_in": 0,
2278      "cost_per_1m_out": 0,
2279      "cost_per_1m_in_cached": 0,
2280      "cost_per_1m_out_cached": 0,
2281      "context_window": 131072,
2282      "default_max_tokens": 13107,
2283      "can_reason": true,
2284      "reasoning_levels": [
2285        "low",
2286        "medium",
2287        "high"
2288      ],
2289      "default_reasoning_effort": "medium",
2290      "supports_attachments": false,
2291      "options": {}
2292    },
2293    {
2294      "id": "openai/gpt-oss-20b",
2295      "name": "OpenAI: gpt-oss-20b",
2296      "cost_per_1m_in": 0.03,
2297      "cost_per_1m_out": 0.14,
2298      "cost_per_1m_in_cached": 0,
2299      "cost_per_1m_out_cached": 0,
2300      "context_window": 131072,
2301      "default_max_tokens": 13107,
2302      "can_reason": true,
2303      "reasoning_levels": [
2304        "low",
2305        "medium",
2306        "high"
2307      ],
2308      "default_reasoning_effort": "medium",
2309      "supports_attachments": false,
2310      "options": {}
2311    },
2312    {
2313      "id": "openai/gpt-oss-20b:free",
2314      "name": "OpenAI: gpt-oss-20b (free)",
2315      "cost_per_1m_in": 0,
2316      "cost_per_1m_out": 0,
2317      "cost_per_1m_in_cached": 0,
2318      "cost_per_1m_out_cached": 0,
2319      "context_window": 131072,
2320      "default_max_tokens": 13107,
2321      "can_reason": true,
2322      "reasoning_levels": [
2323        "low",
2324        "medium",
2325        "high"
2326      ],
2327      "default_reasoning_effort": "medium",
2328      "supports_attachments": false,
2329      "options": {}
2330    },
2331    {
2332      "id": "openai/gpt-oss-safeguard-20b",
2333      "name": "OpenAI: gpt-oss-safeguard-20b",
2334      "cost_per_1m_in": 0.075,
2335      "cost_per_1m_out": 0.3,
2336      "cost_per_1m_in_cached": 0,
2337      "cost_per_1m_out_cached": 0.037,
2338      "context_window": 131072,
2339      "default_max_tokens": 32768,
2340      "can_reason": true,
2341      "reasoning_levels": [
2342        "low",
2343        "medium",
2344        "high"
2345      ],
2346      "default_reasoning_effort": "medium",
2347      "supports_attachments": false,
2348      "options": {}
2349    },
2350    {
2351      "id": "openai/o1",
2352      "name": "OpenAI: o1",
2353      "cost_per_1m_in": 15,
2354      "cost_per_1m_out": 60,
2355      "cost_per_1m_in_cached": 0,
2356      "cost_per_1m_out_cached": 7.5,
2357      "context_window": 200000,
2358      "default_max_tokens": 50000,
2359      "can_reason": false,
2360      "supports_attachments": true,
2361      "options": {}
2362    },
2363    {
2364      "id": "openai/o3",
2365      "name": "OpenAI: o3",
2366      "cost_per_1m_in": 2,
2367      "cost_per_1m_out": 8,
2368      "cost_per_1m_in_cached": 0,
2369      "cost_per_1m_out_cached": 0.5,
2370      "context_window": 200000,
2371      "default_max_tokens": 50000,
2372      "can_reason": true,
2373      "reasoning_levels": [
2374        "low",
2375        "medium",
2376        "high"
2377      ],
2378      "default_reasoning_effort": "medium",
2379      "supports_attachments": true,
2380      "options": {}
2381    },
2382    {
2383      "id": "openai/o3-deep-research",
2384      "name": "OpenAI: o3 Deep Research",
2385      "cost_per_1m_in": 10,
2386      "cost_per_1m_out": 40,
2387      "cost_per_1m_in_cached": 0,
2388      "cost_per_1m_out_cached": 2.5,
2389      "context_window": 200000,
2390      "default_max_tokens": 50000,
2391      "can_reason": true,
2392      "reasoning_levels": [
2393        "low",
2394        "medium",
2395        "high"
2396      ],
2397      "default_reasoning_effort": "medium",
2398      "supports_attachments": true,
2399      "options": {}
2400    },
2401    {
2402      "id": "openai/o3-mini",
2403      "name": "OpenAI: o3 Mini",
2404      "cost_per_1m_in": 1.1,
2405      "cost_per_1m_out": 4.4,
2406      "cost_per_1m_in_cached": 0,
2407      "cost_per_1m_out_cached": 0.55,
2408      "context_window": 200000,
2409      "default_max_tokens": 50000,
2410      "can_reason": false,
2411      "supports_attachments": false,
2412      "options": {}
2413    },
2414    {
2415      "id": "openai/o3-mini-high",
2416      "name": "OpenAI: o3 Mini High",
2417      "cost_per_1m_in": 1.1,
2418      "cost_per_1m_out": 4.4,
2419      "cost_per_1m_in_cached": 0,
2420      "cost_per_1m_out_cached": 0.55,
2421      "context_window": 200000,
2422      "default_max_tokens": 50000,
2423      "can_reason": false,
2424      "supports_attachments": false,
2425      "options": {}
2426    },
2427    {
2428      "id": "openai/o3-pro",
2429      "name": "OpenAI: o3 Pro",
2430      "cost_per_1m_in": 20,
2431      "cost_per_1m_out": 80,
2432      "cost_per_1m_in_cached": 0,
2433      "cost_per_1m_out_cached": 0,
2434      "context_window": 200000,
2435      "default_max_tokens": 50000,
2436      "can_reason": true,
2437      "reasoning_levels": [
2438        "low",
2439        "medium",
2440        "high"
2441      ],
2442      "default_reasoning_effort": "medium",
2443      "supports_attachments": true,
2444      "options": {}
2445    },
2446    {
2447      "id": "openai/o4-mini",
2448      "name": "OpenAI: o4 Mini",
2449      "cost_per_1m_in": 1.1,
2450      "cost_per_1m_out": 4.4,
2451      "cost_per_1m_in_cached": 0,
2452      "cost_per_1m_out_cached": 0.275,
2453      "context_window": 200000,
2454      "default_max_tokens": 50000,
2455      "can_reason": true,
2456      "reasoning_levels": [
2457        "low",
2458        "medium",
2459        "high"
2460      ],
2461      "default_reasoning_effort": "medium",
2462      "supports_attachments": true,
2463      "options": {}
2464    },
2465    {
2466      "id": "openai/o4-mini-deep-research",
2467      "name": "OpenAI: o4 Mini Deep Research",
2468      "cost_per_1m_in": 2,
2469      "cost_per_1m_out": 8,
2470      "cost_per_1m_in_cached": 0,
2471      "cost_per_1m_out_cached": 0.5,
2472      "context_window": 200000,
2473      "default_max_tokens": 50000,
2474      "can_reason": true,
2475      "reasoning_levels": [
2476        "low",
2477        "medium",
2478        "high"
2479      ],
2480      "default_reasoning_effort": "medium",
2481      "supports_attachments": true,
2482      "options": {}
2483    },
2484    {
2485      "id": "openai/o4-mini-high",
2486      "name": "OpenAI: o4 Mini High",
2487      "cost_per_1m_in": 1.1,
2488      "cost_per_1m_out": 4.4,
2489      "cost_per_1m_in_cached": 0,
2490      "cost_per_1m_out_cached": 0.275,
2491      "context_window": 200000,
2492      "default_max_tokens": 50000,
2493      "can_reason": true,
2494      "reasoning_levels": [
2495        "low",
2496        "medium",
2497        "high"
2498      ],
2499      "default_reasoning_effort": "medium",
2500      "supports_attachments": true,
2501      "options": {}
2502    },
2503    {
2504      "id": "prime-intellect/intellect-3",
2505      "name": "Prime Intellect: INTELLECT-3",
2506      "cost_per_1m_in": 0.19999999999999998,
2507      "cost_per_1m_out": 1.1,
2508      "cost_per_1m_in_cached": 0,
2509      "cost_per_1m_out_cached": 0,
2510      "context_window": 131072,
2511      "default_max_tokens": 65536,
2512      "can_reason": true,
2513      "reasoning_levels": [
2514        "low",
2515        "medium",
2516        "high"
2517      ],
2518      "default_reasoning_effort": "medium",
2519      "supports_attachments": false,
2520      "options": {}
2521    },
2522    {
2523      "id": "qwen/qwen-2.5-72b-instruct",
2524      "name": "Qwen2.5 72B Instruct",
2525      "cost_per_1m_in": 0.12,
2526      "cost_per_1m_out": 0.39,
2527      "cost_per_1m_in_cached": 0,
2528      "cost_per_1m_out_cached": 0,
2529      "context_window": 32768,
2530      "default_max_tokens": 8192,
2531      "can_reason": false,
2532      "supports_attachments": false,
2533      "options": {}
2534    },
2535    {
2536      "id": "qwen/qwq-32b",
2537      "name": "Qwen: QwQ 32B",
2538      "cost_per_1m_in": 0.15,
2539      "cost_per_1m_out": 0.58,
2540      "cost_per_1m_in_cached": 0,
2541      "cost_per_1m_out_cached": 0,
2542      "context_window": 131072,
2543      "default_max_tokens": 65536,
2544      "can_reason": true,
2545      "reasoning_levels": [
2546        "low",
2547        "medium",
2548        "high"
2549      ],
2550      "default_reasoning_effort": "medium",
2551      "supports_attachments": false,
2552      "options": {}
2553    },
2554    {
2555      "id": "qwen/qwen-plus-2025-07-28",
2556      "name": "Qwen: Qwen Plus 0728",
2557      "cost_per_1m_in": 0.39999999999999997,
2558      "cost_per_1m_out": 1.2,
2559      "cost_per_1m_in_cached": 0,
2560      "cost_per_1m_out_cached": 0,
2561      "context_window": 1000000,
2562      "default_max_tokens": 16384,
2563      "can_reason": false,
2564      "supports_attachments": false,
2565      "options": {}
2566    },
2567    {
2568      "id": "qwen/qwen-plus-2025-07-28:thinking",
2569      "name": "Qwen: Qwen Plus 0728 (thinking)",
2570      "cost_per_1m_in": 0.39999999999999997,
2571      "cost_per_1m_out": 4,
2572      "cost_per_1m_in_cached": 0,
2573      "cost_per_1m_out_cached": 0,
2574      "context_window": 1000000,
2575      "default_max_tokens": 16384,
2576      "can_reason": true,
2577      "reasoning_levels": [
2578        "low",
2579        "medium",
2580        "high"
2581      ],
2582      "default_reasoning_effort": "medium",
2583      "supports_attachments": false,
2584      "options": {}
2585    },
2586    {
2587      "id": "qwen/qwen-vl-max",
2588      "name": "Qwen: Qwen VL Max",
2589      "cost_per_1m_in": 0.7999999999999999,
2590      "cost_per_1m_out": 3.1999999999999997,
2591      "cost_per_1m_in_cached": 0,
2592      "cost_per_1m_out_cached": 0,
2593      "context_window": 131072,
2594      "default_max_tokens": 4096,
2595      "can_reason": false,
2596      "supports_attachments": true,
2597      "options": {}
2598    },
2599    {
2600      "id": "qwen/qwen-max",
2601      "name": "Qwen: Qwen-Max ",
2602      "cost_per_1m_in": 1.5999999999999999,
2603      "cost_per_1m_out": 6.3999999999999995,
2604      "cost_per_1m_in_cached": 0,
2605      "cost_per_1m_out_cached": 0.64,
2606      "context_window": 32768,
2607      "default_max_tokens": 4096,
2608      "can_reason": false,
2609      "supports_attachments": false,
2610      "options": {}
2611    },
2612    {
2613      "id": "qwen/qwen-plus",
2614      "name": "Qwen: Qwen-Plus",
2615      "cost_per_1m_in": 0.39999999999999997,
2616      "cost_per_1m_out": 1.2,
2617      "cost_per_1m_in_cached": 0,
2618      "cost_per_1m_out_cached": 0.16,
2619      "context_window": 131072,
2620      "default_max_tokens": 4096,
2621      "can_reason": false,
2622      "supports_attachments": false,
2623      "options": {}
2624    },
2625    {
2626      "id": "qwen/qwen-turbo",
2627      "name": "Qwen: Qwen-Turbo",
2628      "cost_per_1m_in": 0.049999999999999996,
2629      "cost_per_1m_out": 0.19999999999999998,
2630      "cost_per_1m_in_cached": 0,
2631      "cost_per_1m_out_cached": 0.02,
2632      "context_window": 1000000,
2633      "default_max_tokens": 4096,
2634      "can_reason": false,
2635      "supports_attachments": false,
2636      "options": {}
2637    },
2638    {
2639      "id": "qwen/qwen3-14b",
2640      "name": "Qwen: Qwen3 14B",
2641      "cost_per_1m_in": 0.049999999999999996,
2642      "cost_per_1m_out": 0.22,
2643      "cost_per_1m_in_cached": 0,
2644      "cost_per_1m_out_cached": 0,
2645      "context_window": 40960,
2646      "default_max_tokens": 20480,
2647      "can_reason": true,
2648      "reasoning_levels": [
2649        "low",
2650        "medium",
2651        "high"
2652      ],
2653      "default_reasoning_effort": "medium",
2654      "supports_attachments": false,
2655      "options": {}
2656    },
2657    {
2658      "id": "qwen/qwen3-235b-a22b",
2659      "name": "Qwen: Qwen3 235B A22B",
2660      "cost_per_1m_in": 0.22,
2661      "cost_per_1m_out": 0.88,
2662      "cost_per_1m_in_cached": 0,
2663      "cost_per_1m_out_cached": 0,
2664      "context_window": 131072,
2665      "default_max_tokens": 13107,
2666      "can_reason": true,
2667      "reasoning_levels": [
2668        "low",
2669        "medium",
2670        "high"
2671      ],
2672      "default_reasoning_effort": "medium",
2673      "supports_attachments": false,
2674      "options": {}
2675    },
2676    {
2677      "id": "qwen/qwen3-235b-a22b-2507",
2678      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2679      "cost_per_1m_in": 0.08,
2680      "cost_per_1m_out": 0.55,
2681      "cost_per_1m_in_cached": 0,
2682      "cost_per_1m_out_cached": 0,
2683      "context_window": 262144,
2684      "default_max_tokens": 32768,
2685      "can_reason": false,
2686      "supports_attachments": false,
2687      "options": {}
2688    },
2689    {
2690      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2691      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2692      "cost_per_1m_in": 0.11,
2693      "cost_per_1m_out": 0.6,
2694      "cost_per_1m_in_cached": 0,
2695      "cost_per_1m_out_cached": 0,
2696      "context_window": 262144,
2697      "default_max_tokens": 131072,
2698      "can_reason": true,
2699      "reasoning_levels": [
2700        "low",
2701        "medium",
2702        "high"
2703      ],
2704      "default_reasoning_effort": "medium",
2705      "supports_attachments": false,
2706      "options": {}
2707    },
2708    {
2709      "id": "qwen/qwen3-30b-a3b",
2710      "name": "Qwen: Qwen3 30B A3B",
2711      "cost_per_1m_in": 0.08,
2712      "cost_per_1m_out": 0.28,
2713      "cost_per_1m_in_cached": 0,
2714      "cost_per_1m_out_cached": 0,
2715      "context_window": 131072,
2716      "default_max_tokens": 65536,
2717      "can_reason": true,
2718      "reasoning_levels": [
2719        "low",
2720        "medium",
2721        "high"
2722      ],
2723      "default_reasoning_effort": "medium",
2724      "supports_attachments": false,
2725      "options": {}
2726    },
2727    {
2728      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2729      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2730      "cost_per_1m_in": 0.08,
2731      "cost_per_1m_out": 0.33,
2732      "cost_per_1m_in_cached": 0,
2733      "cost_per_1m_out_cached": 0,
2734      "context_window": 262144,
2735      "default_max_tokens": 131072,
2736      "can_reason": false,
2737      "supports_attachments": false,
2738      "options": {}
2739    },
2740    {
2741      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2742      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2743      "cost_per_1m_in": 0.09999999999999999,
2744      "cost_per_1m_out": 0.3,
2745      "cost_per_1m_in_cached": 0,
2746      "cost_per_1m_out_cached": 0,
2747      "context_window": 262144,
2748      "default_max_tokens": 26214,
2749      "can_reason": true,
2750      "reasoning_levels": [
2751        "low",
2752        "medium",
2753        "high"
2754      ],
2755      "default_reasoning_effort": "medium",
2756      "supports_attachments": false,
2757      "options": {}
2758    },
2759    {
2760      "id": "qwen/qwen3-32b",
2761      "name": "Qwen: Qwen3 32B",
2762      "cost_per_1m_in": 0.15,
2763      "cost_per_1m_out": 0.5,
2764      "cost_per_1m_in_cached": 0,
2765      "cost_per_1m_out_cached": 0,
2766      "context_window": 131072,
2767      "default_max_tokens": 4000,
2768      "can_reason": true,
2769      "reasoning_levels": [
2770        "low",
2771        "medium",
2772        "high"
2773      ],
2774      "default_reasoning_effort": "medium",
2775      "supports_attachments": false,
2776      "options": {}
2777    },
2778    {
2779      "id": "qwen/qwen3-4b:free",
2780      "name": "Qwen: Qwen3 4B (free)",
2781      "cost_per_1m_in": 0,
2782      "cost_per_1m_out": 0,
2783      "cost_per_1m_in_cached": 0,
2784      "cost_per_1m_out_cached": 0,
2785      "context_window": 40960,
2786      "default_max_tokens": 4096,
2787      "can_reason": true,
2788      "reasoning_levels": [
2789        "low",
2790        "medium",
2791        "high"
2792      ],
2793      "default_reasoning_effort": "medium",
2794      "supports_attachments": false,
2795      "options": {}
2796    },
2797    {
2798      "id": "qwen/qwen3-8b",
2799      "name": "Qwen: Qwen3 8B",
2800      "cost_per_1m_in": 0.2,
2801      "cost_per_1m_out": 0.2,
2802      "cost_per_1m_in_cached": 0,
2803      "cost_per_1m_out_cached": 0,
2804      "context_window": 40960,
2805      "default_max_tokens": 4096,
2806      "can_reason": true,
2807      "reasoning_levels": [
2808        "low",
2809        "medium",
2810        "high"
2811      ],
2812      "default_reasoning_effort": "medium",
2813      "supports_attachments": false,
2814      "options": {}
2815    },
2816    {
2817      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2818      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2819      "cost_per_1m_in": 0.09999999999999999,
2820      "cost_per_1m_out": 0.3,
2821      "cost_per_1m_in_cached": 0,
2822      "cost_per_1m_out_cached": 0,
2823      "context_window": 262144,
2824      "default_max_tokens": 26214,
2825      "can_reason": false,
2826      "supports_attachments": false,
2827      "options": {}
2828    },
2829    {
2830      "id": "qwen/qwen3-coder",
2831      "name": "Qwen: Qwen3 Coder 480B A35B",
2832      "cost_per_1m_in": 0.22,
2833      "cost_per_1m_out": 1.7999999999999998,
2834      "cost_per_1m_in_cached": 0,
2835      "cost_per_1m_out_cached": 0,
2836      "context_window": 262144,
2837      "default_max_tokens": 32768,
2838      "can_reason": false,
2839      "supports_attachments": false,
2840      "options": {}
2841    },
2842    {
2843      "id": "qwen/qwen3-coder:exacto",
2844      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2845      "cost_per_1m_in": 0.22,
2846      "cost_per_1m_out": 1.7999999999999998,
2847      "cost_per_1m_in_cached": 0,
2848      "cost_per_1m_out_cached": 0,
2849      "context_window": 262144,
2850      "default_max_tokens": 32768,
2851      "can_reason": false,
2852      "supports_attachments": false,
2853      "options": {}
2854    },
2855    {
2856      "id": "qwen/qwen3-coder:free",
2857      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2858      "cost_per_1m_in": 0,
2859      "cost_per_1m_out": 0,
2860      "cost_per_1m_in_cached": 0,
2861      "cost_per_1m_out_cached": 0,
2862      "context_window": 262000,
2863      "default_max_tokens": 131000,
2864      "can_reason": false,
2865      "supports_attachments": false,
2866      "options": {}
2867    },
2868    {
2869      "id": "qwen/qwen3-coder-flash",
2870      "name": "Qwen: Qwen3 Coder Flash",
2871      "cost_per_1m_in": 0.3,
2872      "cost_per_1m_out": 1.5,
2873      "cost_per_1m_in_cached": 0,
2874      "cost_per_1m_out_cached": 0.08,
2875      "context_window": 128000,
2876      "default_max_tokens": 32768,
2877      "can_reason": false,
2878      "supports_attachments": false,
2879      "options": {}
2880    },
2881    {
2882      "id": "qwen/qwen3-coder-plus",
2883      "name": "Qwen: Qwen3 Coder Plus",
2884      "cost_per_1m_in": 1,
2885      "cost_per_1m_out": 5,
2886      "cost_per_1m_in_cached": 0,
2887      "cost_per_1m_out_cached": 0.09999999999999999,
2888      "context_window": 128000,
2889      "default_max_tokens": 32768,
2890      "can_reason": false,
2891      "supports_attachments": false,
2892      "options": {}
2893    },
2894    {
2895      "id": "qwen/qwen3-max",
2896      "name": "Qwen: Qwen3 Max",
2897      "cost_per_1m_in": 1.2,
2898      "cost_per_1m_out": 6,
2899      "cost_per_1m_in_cached": 0,
2900      "cost_per_1m_out_cached": 0.24,
2901      "context_window": 256000,
2902      "default_max_tokens": 16384,
2903      "can_reason": false,
2904      "supports_attachments": false,
2905      "options": {}
2906    },
2907    {
2908      "id": "qwen/qwen3-next-80b-a3b-instruct",
2909      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2910      "cost_per_1m_in": 0.06,
2911      "cost_per_1m_out": 0.6,
2912      "cost_per_1m_in_cached": 0,
2913      "cost_per_1m_out_cached": 0,
2914      "context_window": 262144,
2915      "default_max_tokens": 26214,
2916      "can_reason": false,
2917      "supports_attachments": false,
2918      "options": {}
2919    },
2920    {
2921      "id": "qwen/qwen3-next-80b-a3b-thinking",
2922      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2923      "cost_per_1m_in": 0.15,
2924      "cost_per_1m_out": 1.2,
2925      "cost_per_1m_in_cached": 0,
2926      "cost_per_1m_out_cached": 0,
2927      "context_window": 262144,
2928      "default_max_tokens": 131072,
2929      "can_reason": true,
2930      "reasoning_levels": [
2931        "low",
2932        "medium",
2933        "high"
2934      ],
2935      "default_reasoning_effort": "medium",
2936      "supports_attachments": false,
2937      "options": {}
2938    },
2939    {
2940      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2941      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2942      "cost_per_1m_in": 0.22,
2943      "cost_per_1m_out": 0.88,
2944      "cost_per_1m_in_cached": 0,
2945      "cost_per_1m_out_cached": 0,
2946      "context_window": 262144,
2947      "default_max_tokens": 26214,
2948      "can_reason": false,
2949      "supports_attachments": true,
2950      "options": {}
2951    },
2952    {
2953      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2954      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2955      "cost_per_1m_in": 0.15,
2956      "cost_per_1m_out": 0.6,
2957      "cost_per_1m_in_cached": 0,
2958      "cost_per_1m_out_cached": 0,
2959      "context_window": 262144,
2960      "default_max_tokens": 26214,
2961      "can_reason": false,
2962      "supports_attachments": true,
2963      "options": {}
2964    },
2965    {
2966      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2967      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2968      "cost_per_1m_in": 0.19999999999999998,
2969      "cost_per_1m_out": 1,
2970      "cost_per_1m_in_cached": 0,
2971      "cost_per_1m_out_cached": 0,
2972      "context_window": 131072,
2973      "default_max_tokens": 16384,
2974      "can_reason": true,
2975      "reasoning_levels": [
2976        "low",
2977        "medium",
2978        "high"
2979      ],
2980      "default_reasoning_effort": "medium",
2981      "supports_attachments": true,
2982      "options": {}
2983    },
2984    {
2985      "id": "qwen/qwen3-vl-8b-instruct",
2986      "name": "Qwen: Qwen3 VL 8B Instruct",
2987      "cost_per_1m_in": 0.18,
2988      "cost_per_1m_out": 0.7,
2989      "cost_per_1m_in_cached": 0,
2990      "cost_per_1m_out_cached": 0,
2991      "context_window": 256000,
2992      "default_max_tokens": 16384,
2993      "can_reason": false,
2994      "supports_attachments": true,
2995      "options": {}
2996    },
2997    {
2998      "id": "qwen/qwen3-vl-8b-thinking",
2999      "name": "Qwen: Qwen3 VL 8B Thinking",
3000      "cost_per_1m_in": 0.18,
3001      "cost_per_1m_out": 2.0999999999999996,
3002      "cost_per_1m_in_cached": 0,
3003      "cost_per_1m_out_cached": 0,
3004      "context_window": 256000,
3005      "default_max_tokens": 16384,
3006      "can_reason": true,
3007      "reasoning_levels": [
3008        "low",
3009        "medium",
3010        "high"
3011      ],
3012      "default_reasoning_effort": "medium",
3013      "supports_attachments": true,
3014      "options": {}
3015    },
3016    {
3017      "id": "relace/relace-search",
3018      "name": "Relace: Relace Search",
3019      "cost_per_1m_in": 1,
3020      "cost_per_1m_out": 3,
3021      "cost_per_1m_in_cached": 0,
3022      "cost_per_1m_out_cached": 0,
3023      "context_window": 256000,
3024      "default_max_tokens": 64000,
3025      "can_reason": false,
3026      "supports_attachments": false,
3027      "options": {}
3028    },
3029    {
3030      "id": "stepfun-ai/step3",
3031      "name": "StepFun: Step3",
3032      "cost_per_1m_in": 0.5700000000000001,
3033      "cost_per_1m_out": 1.42,
3034      "cost_per_1m_in_cached": 0,
3035      "cost_per_1m_out_cached": 0,
3036      "context_window": 65536,
3037      "default_max_tokens": 32768,
3038      "can_reason": true,
3039      "reasoning_levels": [
3040        "low",
3041        "medium",
3042        "high"
3043      ],
3044      "default_reasoning_effort": "medium",
3045      "supports_attachments": true,
3046      "options": {}
3047    },
3048    {
3049      "id": "tngtech/deepseek-r1t2-chimera",
3050      "name": "TNG: DeepSeek R1T2 Chimera",
3051      "cost_per_1m_in": 0.25,
3052      "cost_per_1m_out": 0.85,
3053      "cost_per_1m_in_cached": 0,
3054      "cost_per_1m_out_cached": 0,
3055      "context_window": 163840,
3056      "default_max_tokens": 81920,
3057      "can_reason": true,
3058      "reasoning_levels": [
3059        "low",
3060        "medium",
3061        "high"
3062      ],
3063      "default_reasoning_effort": "medium",
3064      "supports_attachments": false,
3065      "options": {}
3066    },
3067    {
3068      "id": "tngtech/tng-r1t-chimera",
3069      "name": "TNG: R1T Chimera",
3070      "cost_per_1m_in": 0.25,
3071      "cost_per_1m_out": 0.85,
3072      "cost_per_1m_in_cached": 0,
3073      "cost_per_1m_out_cached": 0,
3074      "context_window": 163840,
3075      "default_max_tokens": 32768,
3076      "can_reason": true,
3077      "reasoning_levels": [
3078        "low",
3079        "medium",
3080        "high"
3081      ],
3082      "default_reasoning_effort": "medium",
3083      "supports_attachments": false,
3084      "options": {}
3085    },
3086    {
3087      "id": "thedrummer/rocinante-12b",
3088      "name": "TheDrummer: Rocinante 12B",
3089      "cost_per_1m_in": 0.16999999999999998,
3090      "cost_per_1m_out": 0.43,
3091      "cost_per_1m_in_cached": 0,
3092      "cost_per_1m_out_cached": 0,
3093      "context_window": 32768,
3094      "default_max_tokens": 3276,
3095      "can_reason": false,
3096      "supports_attachments": false,
3097      "options": {}
3098    },
3099    {
3100      "id": "thedrummer/unslopnemo-12b",
3101      "name": "TheDrummer: UnslopNemo 12B",
3102      "cost_per_1m_in": 0.39999999999999997,
3103      "cost_per_1m_out": 0.39999999999999997,
3104      "cost_per_1m_in_cached": 0,
3105      "cost_per_1m_out_cached": 0,
3106      "context_window": 32768,
3107      "default_max_tokens": 3276,
3108      "can_reason": false,
3109      "supports_attachments": false,
3110      "options": {}
3111    },
3112    {
3113      "id": "alibaba/tongyi-deepresearch-30b-a3b",
3114      "name": "Tongyi DeepResearch 30B A3B",
3115      "cost_per_1m_in": 0.09,
3116      "cost_per_1m_out": 0.39999999999999997,
3117      "cost_per_1m_in_cached": 0,
3118      "cost_per_1m_out_cached": 0,
3119      "context_window": 131072,
3120      "default_max_tokens": 65536,
3121      "can_reason": true,
3122      "reasoning_levels": [
3123        "low",
3124        "medium",
3125        "high"
3126      ],
3127      "default_reasoning_effort": "medium",
3128      "supports_attachments": false,
3129      "options": {}
3130    },
3131    {
3132      "id": "xiaomi/mimo-v2-flash:free",
3133      "name": "Xiaomi: MiMo-V2-Flash (free)",
3134      "cost_per_1m_in": 0,
3135      "cost_per_1m_out": 0,
3136      "cost_per_1m_in_cached": 0,
3137      "cost_per_1m_out_cached": 0,
3138      "context_window": 262144,
3139      "default_max_tokens": 32768,
3140      "can_reason": true,
3141      "reasoning_levels": [
3142        "low",
3143        "medium",
3144        "high"
3145      ],
3146      "default_reasoning_effort": "medium",
3147      "supports_attachments": false,
3148      "options": {}
3149    },
3150    {
3151      "id": "z-ai/glm-4-32b",
3152      "name": "Z.AI: GLM 4 32B ",
3153      "cost_per_1m_in": 0.09999999999999999,
3154      "cost_per_1m_out": 0.09999999999999999,
3155      "cost_per_1m_in_cached": 0,
3156      "cost_per_1m_out_cached": 0,
3157      "context_window": 128000,
3158      "default_max_tokens": 12800,
3159      "can_reason": false,
3160      "supports_attachments": false,
3161      "options": {}
3162    },
3163    {
3164      "id": "z-ai/glm-4.5",
3165      "name": "Z.AI: GLM 4.5",
3166      "cost_per_1m_in": 0.6,
3167      "cost_per_1m_out": 2.2,
3168      "cost_per_1m_in_cached": 0,
3169      "cost_per_1m_out_cached": 0.11,
3170      "context_window": 131072,
3171      "default_max_tokens": 49152,
3172      "can_reason": true,
3173      "reasoning_levels": [
3174        "low",
3175        "medium",
3176        "high"
3177      ],
3178      "default_reasoning_effort": "medium",
3179      "supports_attachments": false,
3180      "options": {}
3181    },
3182    {
3183      "id": "z-ai/glm-4.5-air",
3184      "name": "Z.AI: GLM 4.5 Air",
3185      "cost_per_1m_in": 0.13,
3186      "cost_per_1m_out": 0.85,
3187      "cost_per_1m_in_cached": 0,
3188      "cost_per_1m_out_cached": 0,
3189      "context_window": 131072,
3190      "default_max_tokens": 49152,
3191      "can_reason": true,
3192      "reasoning_levels": [
3193        "low",
3194        "medium",
3195        "high"
3196      ],
3197      "default_reasoning_effort": "medium",
3198      "supports_attachments": false,
3199      "options": {}
3200    },
3201    {
3202      "id": "z-ai/glm-4.5-air:free",
3203      "name": "Z.AI: GLM 4.5 Air (free)",
3204      "cost_per_1m_in": 0,
3205      "cost_per_1m_out": 0,
3206      "cost_per_1m_in_cached": 0,
3207      "cost_per_1m_out_cached": 0,
3208      "context_window": 131072,
3209      "default_max_tokens": 48000,
3210      "can_reason": true,
3211      "reasoning_levels": [
3212        "low",
3213        "medium",
3214        "high"
3215      ],
3216      "default_reasoning_effort": "medium",
3217      "supports_attachments": false,
3218      "options": {}
3219    },
3220    {
3221      "id": "z-ai/glm-4.5v",
3222      "name": "Z.AI: GLM 4.5V",
3223      "cost_per_1m_in": 0.6,
3224      "cost_per_1m_out": 1.7999999999999998,
3225      "cost_per_1m_in_cached": 0,
3226      "cost_per_1m_out_cached": 0.11,
3227      "context_window": 65536,
3228      "default_max_tokens": 8192,
3229      "can_reason": true,
3230      "reasoning_levels": [
3231        "low",
3232        "medium",
3233        "high"
3234      ],
3235      "default_reasoning_effort": "medium",
3236      "supports_attachments": true,
3237      "options": {}
3238    },
3239    {
3240      "id": "z-ai/glm-4.6",
3241      "name": "Z.AI: GLM 4.6",
3242      "cost_per_1m_in": 0.55,
3243      "cost_per_1m_out": 2.2,
3244      "cost_per_1m_in_cached": 0,
3245      "cost_per_1m_out_cached": 0.11,
3246      "context_window": 204800,
3247      "default_max_tokens": 65536,
3248      "can_reason": true,
3249      "reasoning_levels": [
3250        "low",
3251        "medium",
3252        "high"
3253      ],
3254      "default_reasoning_effort": "medium",
3255      "supports_attachments": false,
3256      "options": {}
3257    },
3258    {
3259      "id": "z-ai/glm-4.6:exacto",
3260      "name": "Z.AI: GLM 4.6 (exacto)",
3261      "cost_per_1m_in": 0.44,
3262      "cost_per_1m_out": 1.76,
3263      "cost_per_1m_in_cached": 0,
3264      "cost_per_1m_out_cached": 0,
3265      "context_window": 204800,
3266      "default_max_tokens": 65536,
3267      "can_reason": true,
3268      "reasoning_levels": [
3269        "low",
3270        "medium",
3271        "high"
3272      ],
3273      "default_reasoning_effort": "medium",
3274      "supports_attachments": false,
3275      "options": {}
3276    },
3277    {
3278      "id": "z-ai/glm-4.6v",
3279      "name": "Z.AI: GLM 4.6V",
3280      "cost_per_1m_in": 0.3,
3281      "cost_per_1m_out": 0.8999999999999999,
3282      "cost_per_1m_in_cached": 0,
3283      "cost_per_1m_out_cached": 0,
3284      "context_window": 131072,
3285      "default_max_tokens": 65536,
3286      "can_reason": true,
3287      "reasoning_levels": [
3288        "low",
3289        "medium",
3290        "high"
3291      ],
3292      "default_reasoning_effort": "medium",
3293      "supports_attachments": true,
3294      "options": {}
3295    },
3296    {
3297      "id": "z-ai/glm-4.7",
3298      "name": "Z.AI: GLM 4.7",
3299      "cost_per_1m_in": 0.6,
3300      "cost_per_1m_out": 2.2,
3301      "cost_per_1m_in_cached": 0.11,
3302      "cost_per_1m_out_cached": 0.11,
3303      "context_window": 204800,
3304      "default_max_tokens": 65536,
3305      "can_reason": true,
3306      "reasoning_levels": [
3307        "low",
3308        "medium",
3309        "high"
3310      ],
3311      "default_reasoning_effort": "medium",
3312      "supports_attachments": false,
3313      "options": {}
3314    },
3315    {
3316      "id": "x-ai/grok-3",
3317      "name": "xAI: Grok 3",
3318      "cost_per_1m_in": 3,
3319      "cost_per_1m_out": 15,
3320      "cost_per_1m_in_cached": 0,
3321      "cost_per_1m_out_cached": 0.75,
3322      "context_window": 131072,
3323      "default_max_tokens": 13107,
3324      "can_reason": false,
3325      "supports_attachments": false,
3326      "options": {}
3327    },
3328    {
3329      "id": "x-ai/grok-3-beta",
3330      "name": "xAI: Grok 3 Beta",
3331      "cost_per_1m_in": 3,
3332      "cost_per_1m_out": 15,
3333      "cost_per_1m_in_cached": 0,
3334      "cost_per_1m_out_cached": 0.75,
3335      "context_window": 131072,
3336      "default_max_tokens": 13107,
3337      "can_reason": false,
3338      "supports_attachments": false,
3339      "options": {}
3340    },
3341    {
3342      "id": "x-ai/grok-3-mini",
3343      "name": "xAI: Grok 3 Mini",
3344      "cost_per_1m_in": 0.6,
3345      "cost_per_1m_out": 4,
3346      "cost_per_1m_in_cached": 0,
3347      "cost_per_1m_out_cached": 0.15,
3348      "context_window": 131072,
3349      "default_max_tokens": 13107,
3350      "can_reason": true,
3351      "reasoning_levels": [
3352        "low",
3353        "medium",
3354        "high"
3355      ],
3356      "default_reasoning_effort": "medium",
3357      "supports_attachments": false,
3358      "options": {}
3359    },
3360    {
3361      "id": "x-ai/grok-3-mini-beta",
3362      "name": "xAI: Grok 3 Mini Beta",
3363      "cost_per_1m_in": 0.6,
3364      "cost_per_1m_out": 4,
3365      "cost_per_1m_in_cached": 0,
3366      "cost_per_1m_out_cached": 0.15,
3367      "context_window": 131072,
3368      "default_max_tokens": 13107,
3369      "can_reason": true,
3370      "reasoning_levels": [
3371        "low",
3372        "medium",
3373        "high"
3374      ],
3375      "default_reasoning_effort": "medium",
3376      "supports_attachments": false,
3377      "options": {}
3378    },
3379    {
3380      "id": "x-ai/grok-4",
3381      "name": "xAI: Grok 4",
3382      "cost_per_1m_in": 3,
3383      "cost_per_1m_out": 15,
3384      "cost_per_1m_in_cached": 0,
3385      "cost_per_1m_out_cached": 0.75,
3386      "context_window": 256000,
3387      "default_max_tokens": 25600,
3388      "can_reason": true,
3389      "reasoning_levels": [
3390        "low",
3391        "medium",
3392        "high"
3393      ],
3394      "default_reasoning_effort": "medium",
3395      "supports_attachments": true,
3396      "options": {}
3397    },
3398    {
3399      "id": "x-ai/grok-4-fast",
3400      "name": "xAI: Grok 4 Fast",
3401      "cost_per_1m_in": 0.19999999999999998,
3402      "cost_per_1m_out": 0.5,
3403      "cost_per_1m_in_cached": 0,
3404      "cost_per_1m_out_cached": 0.049999999999999996,
3405      "context_window": 2000000,
3406      "default_max_tokens": 15000,
3407      "can_reason": true,
3408      "reasoning_levels": [
3409        "low",
3410        "medium",
3411        "high"
3412      ],
3413      "default_reasoning_effort": "medium",
3414      "supports_attachments": true,
3415      "options": {}
3416    },
3417    {
3418      "id": "x-ai/grok-4.1-fast",
3419      "name": "xAI: Grok 4.1 Fast",
3420      "cost_per_1m_in": 0.19999999999999998,
3421      "cost_per_1m_out": 0.5,
3422      "cost_per_1m_in_cached": 0,
3423      "cost_per_1m_out_cached": 0.049999999999999996,
3424      "context_window": 2000000,
3425      "default_max_tokens": 15000,
3426      "can_reason": true,
3427      "reasoning_levels": [
3428        "low",
3429        "medium",
3430        "high"
3431      ],
3432      "default_reasoning_effort": "medium",
3433      "supports_attachments": true,
3434      "options": {}
3435    },
3436    {
3437      "id": "x-ai/grok-code-fast-1",
3438      "name": "xAI: Grok Code Fast 1",
3439      "cost_per_1m_in": 0.19999999999999998,
3440      "cost_per_1m_out": 1.5,
3441      "cost_per_1m_in_cached": 0,
3442      "cost_per_1m_out_cached": 0.02,
3443      "context_window": 256000,
3444      "default_max_tokens": 5000,
3445      "can_reason": true,
3446      "reasoning_levels": [
3447        "low",
3448        "medium",
3449        "high"
3450      ],
3451      "default_reasoning_effort": "medium",
3452      "supports_attachments": false,
3453      "options": {}
3454    }
3455  ],
3456  "default_headers": {
3457    "HTTP-Referer": "https://charm.land",
3458    "X-Title": "Crush"
3459  }
3460}