openrouter.json

   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openrouter",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "ai21/jamba-large-1.7",
  12      "name": "AI21: Jamba Large 1.7",
  13      "cost_per_1m_in": 2,
  14      "cost_per_1m_out": 8,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 2048,
  19      "can_reason": false,
  20      "supports_attachments": false,
  21      "options": {}
  22    },
  23    {
  24      "id": "ai21/jamba-mini-1.7",
  25      "name": "AI21: Jamba Mini 1.7",
  26      "cost_per_1m_in": 0.2,
  27      "cost_per_1m_out": 0.4,
  28      "cost_per_1m_in_cached": 0,
  29      "cost_per_1m_out_cached": 0,
  30      "context_window": 256000,
  31      "default_max_tokens": 2048,
  32      "can_reason": false,
  33      "supports_attachments": false,
  34      "options": {}
  35    },
  36    {
  37      "id": "allenai/olmo-3-7b-instruct",
  38      "name": "AllenAI: Olmo 3 7B Instruct",
  39      "cost_per_1m_in": 0.1,
  40      "cost_per_1m_out": 0.2,
  41      "cost_per_1m_in_cached": 0,
  42      "cost_per_1m_out_cached": 0,
  43      "context_window": 65536,
  44      "default_max_tokens": 32768,
  45      "can_reason": false,
  46      "supports_attachments": false,
  47      "options": {}
  48    },
  49    {
  50      "id": "amazon/nova-2-lite-v1",
  51      "name": "Amazon: Nova 2 Lite",
  52      "cost_per_1m_in": 0.3,
  53      "cost_per_1m_out": 2.5,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 1000000,
  57      "default_max_tokens": 32767,
  58      "can_reason": true,
  59      "reasoning_levels": [
  60        "low",
  61        "medium",
  62        "high"
  63      ],
  64      "default_reasoning_effort": "medium",
  65      "supports_attachments": true,
  66      "options": {}
  67    },
  68    {
  69      "id": "amazon/nova-lite-v1",
  70      "name": "Amazon: Nova Lite 1.0",
  71      "cost_per_1m_in": 0.06,
  72      "cost_per_1m_out": 0.24,
  73      "cost_per_1m_in_cached": 0,
  74      "cost_per_1m_out_cached": 0,
  75      "context_window": 300000,
  76      "default_max_tokens": 2560,
  77      "can_reason": false,
  78      "supports_attachments": true,
  79      "options": {}
  80    },
  81    {
  82      "id": "amazon/nova-micro-v1",
  83      "name": "Amazon: Nova Micro 1.0",
  84      "cost_per_1m_in": 0.035,
  85      "cost_per_1m_out": 0.14,
  86      "cost_per_1m_in_cached": 0,
  87      "cost_per_1m_out_cached": 0,
  88      "context_window": 128000,
  89      "default_max_tokens": 2560,
  90      "can_reason": false,
  91      "supports_attachments": false,
  92      "options": {}
  93    },
  94    {
  95      "id": "amazon/nova-premier-v1",
  96      "name": "Amazon: Nova Premier 1.0",
  97      "cost_per_1m_in": 2.5,
  98      "cost_per_1m_out": 12.5,
  99      "cost_per_1m_in_cached": 0,
 100      "cost_per_1m_out_cached": 0.625,
 101      "context_window": 1000000,
 102      "default_max_tokens": 16000,
 103      "can_reason": false,
 104      "supports_attachments": true,
 105      "options": {}
 106    },
 107    {
 108      "id": "amazon/nova-pro-v1",
 109      "name": "Amazon: Nova Pro 1.0",
 110      "cost_per_1m_in": 0.8,
 111      "cost_per_1m_out": 3.2,
 112      "cost_per_1m_in_cached": 0,
 113      "cost_per_1m_out_cached": 0,
 114      "context_window": 300000,
 115      "default_max_tokens": 2560,
 116      "can_reason": false,
 117      "supports_attachments": true,
 118      "options": {}
 119    },
 120    {
 121      "id": "anthropic/claude-3-haiku",
 122      "name": "Anthropic: Claude 3 Haiku",
 123      "cost_per_1m_in": 0.25,
 124      "cost_per_1m_out": 1.25,
 125      "cost_per_1m_in_cached": 0.3,
 126      "cost_per_1m_out_cached": 0.03,
 127      "context_window": 200000,
 128      "default_max_tokens": 2048,
 129      "can_reason": false,
 130      "supports_attachments": true,
 131      "options": {}
 132    },
 133    {
 134      "id": "anthropic/claude-3-opus",
 135      "name": "Anthropic: Claude 3 Opus",
 136      "cost_per_1m_in": 15,
 137      "cost_per_1m_out": 75,
 138      "cost_per_1m_in_cached": 18.75,
 139      "cost_per_1m_out_cached": 1.5,
 140      "context_window": 200000,
 141      "default_max_tokens": 2048,
 142      "can_reason": false,
 143      "supports_attachments": true,
 144      "options": {}
 145    },
 146    {
 147      "id": "anthropic/claude-3.5-haiku",
 148      "name": "Anthropic: Claude 3.5 Haiku",
 149      "cost_per_1m_in": 0.8,
 150      "cost_per_1m_out": 4,
 151      "cost_per_1m_in_cached": 1,
 152      "cost_per_1m_out_cached": 0.08,
 153      "context_window": 200000,
 154      "default_max_tokens": 4096,
 155      "can_reason": false,
 156      "supports_attachments": true,
 157      "options": {}
 158    },
 159    {
 160      "id": "anthropic/claude-3.5-haiku-20241022",
 161      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
 162      "cost_per_1m_in": 0.8,
 163      "cost_per_1m_out": 4,
 164      "cost_per_1m_in_cached": 1,
 165      "cost_per_1m_out_cached": 0.08,
 166      "context_window": 200000,
 167      "default_max_tokens": 4096,
 168      "can_reason": false,
 169      "supports_attachments": true,
 170      "options": {}
 171    },
 172    {
 173      "id": "anthropic/claude-3.5-sonnet",
 174      "name": "Anthropic: Claude 3.5 Sonnet",
 175      "cost_per_1m_in": 6,
 176      "cost_per_1m_out": 30,
 177      "cost_per_1m_in_cached": 0,
 178      "cost_per_1m_out_cached": 0,
 179      "context_window": 200000,
 180      "default_max_tokens": 4096,
 181      "can_reason": false,
 182      "supports_attachments": true,
 183      "options": {}
 184    },
 185    {
 186      "id": "anthropic/claude-3.7-sonnet",
 187      "name": "Anthropic: Claude 3.7 Sonnet",
 188      "cost_per_1m_in": 3,
 189      "cost_per_1m_out": 15,
 190      "cost_per_1m_in_cached": 3.75,
 191      "cost_per_1m_out_cached": 0.3,
 192      "context_window": 200000,
 193      "default_max_tokens": 32000,
 194      "can_reason": true,
 195      "reasoning_levels": [
 196        "low",
 197        "medium",
 198        "high"
 199      ],
 200      "default_reasoning_effort": "medium",
 201      "supports_attachments": true,
 202      "options": {}
 203    },
 204    {
 205      "id": "anthropic/claude-3.7-sonnet:thinking",
 206      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
 207      "cost_per_1m_in": 3,
 208      "cost_per_1m_out": 15,
 209      "cost_per_1m_in_cached": 3.75,
 210      "cost_per_1m_out_cached": 0.3,
 211      "context_window": 200000,
 212      "default_max_tokens": 32000,
 213      "can_reason": true,
 214      "reasoning_levels": [
 215        "low",
 216        "medium",
 217        "high"
 218      ],
 219      "default_reasoning_effort": "medium",
 220      "supports_attachments": true,
 221      "options": {}
 222    },
 223    {
 224      "id": "anthropic/claude-haiku-4.5",
 225      "name": "Anthropic: Claude Haiku 4.5",
 226      "cost_per_1m_in": 1,
 227      "cost_per_1m_out": 5,
 228      "cost_per_1m_in_cached": 1.25,
 229      "cost_per_1m_out_cached": 0.1,
 230      "context_window": 200000,
 231      "default_max_tokens": 32000,
 232      "can_reason": true,
 233      "reasoning_levels": [
 234        "low",
 235        "medium",
 236        "high"
 237      ],
 238      "default_reasoning_effort": "medium",
 239      "supports_attachments": true,
 240      "options": {}
 241    },
 242    {
 243      "id": "anthropic/claude-opus-4",
 244      "name": "Anthropic: Claude Opus 4",
 245      "cost_per_1m_in": 15,
 246      "cost_per_1m_out": 75,
 247      "cost_per_1m_in_cached": 18.75,
 248      "cost_per_1m_out_cached": 1.5,
 249      "context_window": 200000,
 250      "default_max_tokens": 16000,
 251      "can_reason": true,
 252      "reasoning_levels": [
 253        "low",
 254        "medium",
 255        "high"
 256      ],
 257      "default_reasoning_effort": "medium",
 258      "supports_attachments": true,
 259      "options": {}
 260    },
 261    {
 262      "id": "anthropic/claude-opus-4.1",
 263      "name": "Anthropic: Claude Opus 4.1",
 264      "cost_per_1m_in": 15,
 265      "cost_per_1m_out": 75,
 266      "cost_per_1m_in_cached": 18.75,
 267      "cost_per_1m_out_cached": 1.5,
 268      "context_window": 200000,
 269      "default_max_tokens": 20000,
 270      "can_reason": true,
 271      "reasoning_levels": [
 272        "low",
 273        "medium",
 274        "high"
 275      ],
 276      "default_reasoning_effort": "medium",
 277      "supports_attachments": true,
 278      "options": {}
 279    },
 280    {
 281      "id": "anthropic/claude-opus-4.5",
 282      "name": "Anthropic: Claude Opus 4.5",
 283      "cost_per_1m_in": 5,
 284      "cost_per_1m_out": 25,
 285      "cost_per_1m_in_cached": 6.25,
 286      "cost_per_1m_out_cached": 0.5,
 287      "context_window": 200000,
 288      "default_max_tokens": 32000,
 289      "can_reason": true,
 290      "reasoning_levels": [
 291        "low",
 292        "medium",
 293        "high"
 294      ],
 295      "default_reasoning_effort": "medium",
 296      "supports_attachments": true,
 297      "options": {}
 298    },
 299    {
 300      "id": "anthropic/claude-sonnet-4",
 301      "name": "Anthropic: Claude Sonnet 4",
 302      "cost_per_1m_in": 3,
 303      "cost_per_1m_out": 15,
 304      "cost_per_1m_in_cached": 3.75,
 305      "cost_per_1m_out_cached": 0.3,
 306      "context_window": 1000000,
 307      "default_max_tokens": 32000,
 308      "can_reason": true,
 309      "reasoning_levels": [
 310        "low",
 311        "medium",
 312        "high"
 313      ],
 314      "default_reasoning_effort": "medium",
 315      "supports_attachments": true,
 316      "options": {}
 317    },
 318    {
 319      "id": "anthropic/claude-sonnet-4.5",
 320      "name": "Anthropic: Claude Sonnet 4.5",
 321      "cost_per_1m_in": 3,
 322      "cost_per_1m_out": 15,
 323      "cost_per_1m_in_cached": 3.75,
 324      "cost_per_1m_out_cached": 0.3,
 325      "context_window": 1000000,
 326      "default_max_tokens": 32000,
 327      "can_reason": true,
 328      "reasoning_levels": [
 329        "low",
 330        "medium",
 331        "high"
 332      ],
 333      "default_reasoning_effort": "medium",
 334      "supports_attachments": true,
 335      "options": {}
 336    },
 337    {
 338      "id": "arcee-ai/trinity-mini",
 339      "name": "Arcee AI: Trinity Mini",
 340      "cost_per_1m_in": 0.045,
 341      "cost_per_1m_out": 0.15,
 342      "cost_per_1m_in_cached": 0,
 343      "cost_per_1m_out_cached": 0,
 344      "context_window": 131072,
 345      "default_max_tokens": 65536,
 346      "can_reason": true,
 347      "reasoning_levels": [
 348        "low",
 349        "medium",
 350        "high"
 351      ],
 352      "default_reasoning_effort": "medium",
 353      "supports_attachments": false,
 354      "options": {}
 355    },
 356    {
 357      "id": "arcee-ai/trinity-mini:free",
 358      "name": "Arcee AI: Trinity Mini (free)",
 359      "cost_per_1m_in": 0,
 360      "cost_per_1m_out": 0,
 361      "cost_per_1m_in_cached": 0,
 362      "cost_per_1m_out_cached": 0,
 363      "context_window": 131072,
 364      "default_max_tokens": 13107,
 365      "can_reason": true,
 366      "reasoning_levels": [
 367        "low",
 368        "medium",
 369        "high"
 370      ],
 371      "default_reasoning_effort": "medium",
 372      "supports_attachments": false,
 373      "options": {}
 374    },
 375    {
 376      "id": "arcee-ai/virtuoso-large",
 377      "name": "Arcee AI: Virtuoso Large",
 378      "cost_per_1m_in": 0.75,
 379      "cost_per_1m_out": 1.2,
 380      "cost_per_1m_in_cached": 0,
 381      "cost_per_1m_out_cached": 0,
 382      "context_window": 131072,
 383      "default_max_tokens": 32000,
 384      "can_reason": false,
 385      "supports_attachments": false,
 386      "options": {}
 387    },
 388    {
 389      "id": "baidu/ernie-4.5-21b-a3b",
 390      "name": "Baidu: ERNIE 4.5 21B A3B",
 391      "cost_per_1m_in": 0.056,
 392      "cost_per_1m_out": 0.224,
 393      "cost_per_1m_in_cached": 0,
 394      "cost_per_1m_out_cached": 0,
 395      "context_window": 120000,
 396      "default_max_tokens": 4000,
 397      "can_reason": false,
 398      "supports_attachments": false,
 399      "options": {}
 400    },
 401    {
 402      "id": "baidu/ernie-4.5-vl-28b-a3b",
 403      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
 404      "cost_per_1m_in": 0.112,
 405      "cost_per_1m_out": 0.448,
 406      "cost_per_1m_in_cached": 0,
 407      "cost_per_1m_out_cached": 0,
 408      "context_window": 30000,
 409      "default_max_tokens": 4000,
 410      "can_reason": true,
 411      "reasoning_levels": [
 412        "low",
 413        "medium",
 414        "high"
 415      ],
 416      "default_reasoning_effort": "medium",
 417      "supports_attachments": true,
 418      "options": {}
 419    },
 420    {
 421      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
 422      "name": "Cogito V2 Preview Llama 109B",
 423      "cost_per_1m_in": 0.18,
 424      "cost_per_1m_out": 0.59,
 425      "cost_per_1m_in_cached": 0,
 426      "cost_per_1m_out_cached": 0,
 427      "context_window": 32767,
 428      "default_max_tokens": 3276,
 429      "can_reason": true,
 430      "reasoning_levels": [
 431        "low",
 432        "medium",
 433        "high"
 434      ],
 435      "default_reasoning_effort": "medium",
 436      "supports_attachments": true,
 437      "options": {}
 438    },
 439    {
 440      "id": "cohere/command-r-08-2024",
 441      "name": "Cohere: Command R (08-2024)",
 442      "cost_per_1m_in": 0.15,
 443      "cost_per_1m_out": 0.6,
 444      "cost_per_1m_in_cached": 0,
 445      "cost_per_1m_out_cached": 0,
 446      "context_window": 128000,
 447      "default_max_tokens": 2000,
 448      "can_reason": false,
 449      "supports_attachments": false,
 450      "options": {}
 451    },
 452    {
 453      "id": "cohere/command-r-plus-08-2024",
 454      "name": "Cohere: Command R+ (08-2024)",
 455      "cost_per_1m_in": 2.5,
 456      "cost_per_1m_out": 10,
 457      "cost_per_1m_in_cached": 0,
 458      "cost_per_1m_out_cached": 0,
 459      "context_window": 128000,
 460      "default_max_tokens": 2000,
 461      "can_reason": false,
 462      "supports_attachments": false,
 463      "options": {}
 464    },
 465    {
 466      "id": "deepcogito/cogito-v2-preview-llama-405b",
 467      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
 468      "cost_per_1m_in": 3.5,
 469      "cost_per_1m_out": 3.5,
 470      "cost_per_1m_in_cached": 0,
 471      "cost_per_1m_out_cached": 0,
 472      "context_window": 32768,
 473      "default_max_tokens": 3276,
 474      "can_reason": true,
 475      "reasoning_levels": [
 476        "low",
 477        "medium",
 478        "high"
 479      ],
 480      "default_reasoning_effort": "medium",
 481      "supports_attachments": false,
 482      "options": {}
 483    },
 484    {
 485      "id": "deepcogito/cogito-v2-preview-llama-70b",
 486      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
 487      "cost_per_1m_in": 0.88,
 488      "cost_per_1m_out": 0.88,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 32768,
 492      "default_max_tokens": 3276,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": false,
 501      "options": {}
 502    },
 503    {
 504      "id": "deepseek/deepseek-chat",
 505      "name": "DeepSeek: DeepSeek V3",
 506      "cost_per_1m_in": 0.32,
 507      "cost_per_1m_out": 1.04,
 508      "cost_per_1m_in_cached": 0,
 509      "cost_per_1m_out_cached": 0,
 510      "context_window": 64000,
 511      "default_max_tokens": 8000,
 512      "can_reason": false,
 513      "supports_attachments": false,
 514      "options": {}
 515    },
 516    {
 517      "id": "deepseek/deepseek-chat-v3-0324",
 518      "name": "DeepSeek: DeepSeek V3 0324",
 519      "cost_per_1m_in": 0.77,
 520      "cost_per_1m_out": 0.77,
 521      "cost_per_1m_in_cached": 0,
 522      "cost_per_1m_out_cached": 0,
 523      "context_window": 163840,
 524      "default_max_tokens": 65536,
 525      "can_reason": true,
 526      "reasoning_levels": [
 527        "low",
 528        "medium",
 529        "high"
 530      ],
 531      "default_reasoning_effort": "medium",
 532      "supports_attachments": false,
 533      "options": {}
 534    },
 535    {
 536      "id": "deepseek/deepseek-v3.1-terminus",
 537      "name": "DeepSeek: DeepSeek V3.1 Terminus",
 538      "cost_per_1m_in": 0.21,
 539      "cost_per_1m_out": 0.79,
 540      "cost_per_1m_in_cached": 0,
 541      "cost_per_1m_out_cached": 0.168,
 542      "context_window": 163840,
 543      "default_max_tokens": 16384,
 544      "can_reason": true,
 545      "reasoning_levels": [
 546        "low",
 547        "medium",
 548        "high"
 549      ],
 550      "default_reasoning_effort": "medium",
 551      "supports_attachments": false,
 552      "options": {}
 553    },
 554    {
 555      "id": "deepseek/deepseek-v3.1-terminus:exacto",
 556      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
 557      "cost_per_1m_in": 0.21,
 558      "cost_per_1m_out": 0.79,
 559      "cost_per_1m_in_cached": 0,
 560      "cost_per_1m_out_cached": 0.168,
 561      "context_window": 163840,
 562      "default_max_tokens": 16384,
 563      "can_reason": true,
 564      "reasoning_levels": [
 565        "low",
 566        "medium",
 567        "high"
 568      ],
 569      "default_reasoning_effort": "medium",
 570      "supports_attachments": false,
 571      "options": {}
 572    },
 573    {
 574      "id": "deepseek/deepseek-v3.2",
 575      "name": "DeepSeek: DeepSeek V3.2",
 576      "cost_per_1m_in": 0.26,
 577      "cost_per_1m_out": 0.39,
 578      "cost_per_1m_in_cached": 0,
 579      "cost_per_1m_out_cached": 0.13,
 580      "context_window": 163840,
 581      "default_max_tokens": 16384,
 582      "can_reason": true,
 583      "reasoning_levels": [
 584        "low",
 585        "medium",
 586        "high"
 587      ],
 588      "default_reasoning_effort": "medium",
 589      "supports_attachments": false,
 590      "options": {}
 591    },
 592    {
 593      "id": "deepseek/deepseek-v3.2-exp",
 594      "name": "DeepSeek: DeepSeek V3.2 Exp",
 595      "cost_per_1m_in": 0.21,
 596      "cost_per_1m_out": 0.32,
 597      "cost_per_1m_in_cached": 0,
 598      "cost_per_1m_out_cached": 0.168,
 599      "context_window": 163840,
 600      "default_max_tokens": 16384,
 601      "can_reason": true,
 602      "reasoning_levels": [
 603        "low",
 604        "medium",
 605        "high"
 606      ],
 607      "default_reasoning_effort": "medium",
 608      "supports_attachments": false,
 609      "options": {}
 610    },
 611    {
 612      "id": "deepseek/deepseek-r1",
 613      "name": "DeepSeek: R1",
 614      "cost_per_1m_in": 0.7,
 615      "cost_per_1m_out": 2.4,
 616      "cost_per_1m_in_cached": 0,
 617      "cost_per_1m_out_cached": 0,
 618      "context_window": 163840,
 619      "default_max_tokens": 81920,
 620      "can_reason": true,
 621      "reasoning_levels": [
 622        "low",
 623        "medium",
 624        "high"
 625      ],
 626      "default_reasoning_effort": "medium",
 627      "supports_attachments": false,
 628      "options": {}
 629    },
 630    {
 631      "id": "deepseek/deepseek-r1-0528",
 632      "name": "DeepSeek: R1 0528",
 633      "cost_per_1m_in": 0.4,
 634      "cost_per_1m_out": 1.75,
 635      "cost_per_1m_in_cached": 0,
 636      "cost_per_1m_out_cached": 0,
 637      "context_window": 163840,
 638      "default_max_tokens": 81920,
 639      "can_reason": true,
 640      "reasoning_levels": [
 641        "low",
 642        "medium",
 643        "high"
 644      ],
 645      "default_reasoning_effort": "medium",
 646      "supports_attachments": false,
 647      "options": {}
 648    },
 649    {
 650      "id": "deepseek/deepseek-r1-distill-llama-70b",
 651      "name": "DeepSeek: R1 Distill Llama 70B",
 652      "cost_per_1m_in": 0.03,
 653      "cost_per_1m_out": 0.11,
 654      "cost_per_1m_in_cached": 0,
 655      "cost_per_1m_out_cached": 0,
 656      "context_window": 131072,
 657      "default_max_tokens": 65536,
 658      "can_reason": true,
 659      "reasoning_levels": [
 660        "low",
 661        "medium",
 662        "high"
 663      ],
 664      "default_reasoning_effort": "medium",
 665      "supports_attachments": false,
 666      "options": {}
 667    },
 668    {
 669      "id": "google/gemini-2.0-flash-001",
 670      "name": "Google: Gemini 2.0 Flash",
 671      "cost_per_1m_in": 0.1,
 672      "cost_per_1m_out": 0.4,
 673      "cost_per_1m_in_cached": 0.1833,
 674      "cost_per_1m_out_cached": 0.025,
 675      "context_window": 1048576,
 676      "default_max_tokens": 4096,
 677      "can_reason": false,
 678      "supports_attachments": true,
 679      "options": {}
 680    },
 681    {
 682      "id": "google/gemini-2.0-flash-exp:free",
 683      "name": "Google: Gemini 2.0 Flash Experimental (free)",
 684      "cost_per_1m_in": 0,
 685      "cost_per_1m_out": 0,
 686      "cost_per_1m_in_cached": 0,
 687      "cost_per_1m_out_cached": 0,
 688      "context_window": 1048576,
 689      "default_max_tokens": 4096,
 690      "can_reason": false,
 691      "supports_attachments": true,
 692      "options": {}
 693    },
 694    {
 695      "id": "google/gemini-2.0-flash-lite-001",
 696      "name": "Google: Gemini 2.0 Flash Lite",
 697      "cost_per_1m_in": 0.075,
 698      "cost_per_1m_out": 0.3,
 699      "cost_per_1m_in_cached": 0,
 700      "cost_per_1m_out_cached": 0,
 701      "context_window": 1048576,
 702      "default_max_tokens": 4096,
 703      "can_reason": false,
 704      "supports_attachments": true,
 705      "options": {}
 706    },
 707    {
 708      "id": "google/gemini-2.5-flash",
 709      "name": "Google: Gemini 2.5 Flash",
 710      "cost_per_1m_in": 0.3,
 711      "cost_per_1m_out": 2.5,
 712      "cost_per_1m_in_cached": 0.3833,
 713      "cost_per_1m_out_cached": 0.03,
 714      "context_window": 1048576,
 715      "default_max_tokens": 32767,
 716      "can_reason": true,
 717      "reasoning_levels": [
 718        "low",
 719        "medium",
 720        "high"
 721      ],
 722      "default_reasoning_effort": "medium",
 723      "supports_attachments": true,
 724      "options": {}
 725    },
 726    {
 727      "id": "google/gemini-2.5-flash-lite",
 728      "name": "Google: Gemini 2.5 Flash Lite",
 729      "cost_per_1m_in": 0.1,
 730      "cost_per_1m_out": 0.4,
 731      "cost_per_1m_in_cached": 0.1833,
 732      "cost_per_1m_out_cached": 0.025,
 733      "context_window": 1048576,
 734      "default_max_tokens": 32767,
 735      "can_reason": true,
 736      "reasoning_levels": [
 737        "low",
 738        "medium",
 739        "high"
 740      ],
 741      "default_reasoning_effort": "medium",
 742      "supports_attachments": true,
 743      "options": {}
 744    },
 745    {
 746      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
 747      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
 748      "cost_per_1m_in": 0.1,
 749      "cost_per_1m_out": 0.4,
 750      "cost_per_1m_in_cached": 0,
 751      "cost_per_1m_out_cached": 0,
 752      "context_window": 1048576,
 753      "default_max_tokens": 32768,
 754      "can_reason": true,
 755      "reasoning_levels": [
 756        "low",
 757        "medium",
 758        "high"
 759      ],
 760      "default_reasoning_effort": "medium",
 761      "supports_attachments": true,
 762      "options": {}
 763    },
 764    {
 765      "id": "google/gemini-2.5-flash-preview-09-2025",
 766      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
 767      "cost_per_1m_in": 0.3,
 768      "cost_per_1m_out": 2.5,
 769      "cost_per_1m_in_cached": 0.3833,
 770      "cost_per_1m_out_cached": 0.075,
 771      "context_window": 1048576,
 772      "default_max_tokens": 32768,
 773      "can_reason": true,
 774      "reasoning_levels": [
 775        "low",
 776        "medium",
 777        "high"
 778      ],
 779      "default_reasoning_effort": "medium",
 780      "supports_attachments": true,
 781      "options": {}
 782    },
 783    {
 784      "id": "google/gemini-2.5-pro",
 785      "name": "Google: Gemini 2.5 Pro",
 786      "cost_per_1m_in": 1.25,
 787      "cost_per_1m_out": 10,
 788      "cost_per_1m_in_cached": 1.625,
 789      "cost_per_1m_out_cached": 0.125,
 790      "context_window": 1048576,
 791      "default_max_tokens": 32768,
 792      "can_reason": true,
 793      "reasoning_levels": [
 794        "low",
 795        "medium",
 796        "high"
 797      ],
 798      "default_reasoning_effort": "medium",
 799      "supports_attachments": true,
 800      "options": {}
 801    },
 802    {
 803      "id": "google/gemini-2.5-pro-preview-05-06",
 804      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 805      "cost_per_1m_in": 1.25,
 806      "cost_per_1m_out": 10,
 807      "cost_per_1m_in_cached": 1.625,
 808      "cost_per_1m_out_cached": 0.125,
 809      "context_window": 1048576,
 810      "default_max_tokens": 32768,
 811      "can_reason": true,
 812      "reasoning_levels": [
 813        "low",
 814        "medium",
 815        "high"
 816      ],
 817      "default_reasoning_effort": "medium",
 818      "supports_attachments": true,
 819      "options": {}
 820    },
 821    {
 822      "id": "google/gemini-2.5-pro-preview",
 823      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 824      "cost_per_1m_in": 1.25,
 825      "cost_per_1m_out": 10,
 826      "cost_per_1m_in_cached": 1.625,
 827      "cost_per_1m_out_cached": 0.125,
 828      "context_window": 1048576,
 829      "default_max_tokens": 32768,
 830      "can_reason": true,
 831      "reasoning_levels": [
 832        "low",
 833        "medium",
 834        "high"
 835      ],
 836      "default_reasoning_effort": "medium",
 837      "supports_attachments": true,
 838      "options": {}
 839    },
 840    {
 841      "id": "google/gemini-3-flash-preview",
 842      "name": "Google: Gemini 3 Flash Preview",
 843      "cost_per_1m_in": 0.5,
 844      "cost_per_1m_out": 3,
 845      "cost_per_1m_in_cached": 0,
 846      "cost_per_1m_out_cached": 0.05,
 847      "context_window": 1048576,
 848      "default_max_tokens": 32767,
 849      "can_reason": true,
 850      "reasoning_levels": [
 851        "low",
 852        "medium",
 853        "high"
 854      ],
 855      "default_reasoning_effort": "medium",
 856      "supports_attachments": true,
 857      "options": {}
 858    },
 859    {
 860      "id": "google/gemini-3-pro-preview",
 861      "name": "Google: Gemini 3 Pro Preview",
 862      "cost_per_1m_in": 2,
 863      "cost_per_1m_out": 12,
 864      "cost_per_1m_in_cached": 2.375,
 865      "cost_per_1m_out_cached": 0.2,
 866      "context_window": 1048576,
 867      "default_max_tokens": 32768,
 868      "can_reason": true,
 869      "reasoning_levels": [
 870        "low",
 871        "medium",
 872        "high"
 873      ],
 874      "default_reasoning_effort": "medium",
 875      "supports_attachments": true,
 876      "options": {}
 877    },
 878    {
 879      "id": "google/gemma-3-27b-it",
 880      "name": "Google: Gemma 3 27B",
 881      "cost_per_1m_in": 0.04,
 882      "cost_per_1m_out": 0.15,
 883      "cost_per_1m_in_cached": 0,
 884      "cost_per_1m_out_cached": 0,
 885      "context_window": 96000,
 886      "default_max_tokens": 48000,
 887      "can_reason": false,
 888      "supports_attachments": true,
 889      "options": {}
 890    },
 891    {
 892      "id": "inception/mercury",
 893      "name": "Inception: Mercury",
 894      "cost_per_1m_in": 0.25,
 895      "cost_per_1m_out": 1,
 896      "cost_per_1m_in_cached": 0,
 897      "cost_per_1m_out_cached": 0,
 898      "context_window": 128000,
 899      "default_max_tokens": 8192,
 900      "can_reason": false,
 901      "supports_attachments": false,
 902      "options": {}
 903    },
 904    {
 905      "id": "inception/mercury-coder",
 906      "name": "Inception: Mercury Coder",
 907      "cost_per_1m_in": 0.25,
 908      "cost_per_1m_out": 1,
 909      "cost_per_1m_in_cached": 0,
 910      "cost_per_1m_out_cached": 0,
 911      "context_window": 128000,
 912      "default_max_tokens": 8192,
 913      "can_reason": false,
 914      "supports_attachments": false,
 915      "options": {}
 916    },
 917    {
 918      "id": "kwaipilot/kat-coder-pro:free",
 919      "name": "Kwaipilot: KAT-Coder-Pro V1 (free)",
 920      "cost_per_1m_in": 0,
 921      "cost_per_1m_out": 0,
 922      "cost_per_1m_in_cached": 0,
 923      "cost_per_1m_out_cached": 0,
 924      "context_window": 256000,
 925      "default_max_tokens": 16384,
 926      "can_reason": false,
 927      "supports_attachments": false,
 928      "options": {}
 929    },
 930    {
 931      "id": "meta-llama/llama-3.1-405b-instruct",
 932      "name": "Meta: Llama 3.1 405B Instruct",
 933      "cost_per_1m_in": 5,
 934      "cost_per_1m_out": 16,
 935      "cost_per_1m_in_cached": 0,
 936      "cost_per_1m_out_cached": 0,
 937      "context_window": 128000,
 938      "default_max_tokens": 4096,
 939      "can_reason": false,
 940      "supports_attachments": false,
 941      "options": {}
 942    },
 943    {
 944      "id": "meta-llama/llama-3.1-70b-instruct",
 945      "name": "Meta: Llama 3.1 70B Instruct",
 946      "cost_per_1m_in": 0.4,
 947      "cost_per_1m_out": 0.4,
 948      "cost_per_1m_in_cached": 0,
 949      "cost_per_1m_out_cached": 0,
 950      "context_window": 131072,
 951      "default_max_tokens": 8192,
 952      "can_reason": false,
 953      "supports_attachments": false,
 954      "options": {}
 955    },
 956    {
 957      "id": "meta-llama/llama-3.1-8b-instruct",
 958      "name": "Meta: Llama 3.1 8B Instruct",
 959      "cost_per_1m_in": 0.03,
 960      "cost_per_1m_out": 0.09,
 961      "cost_per_1m_in_cached": 0,
 962      "cost_per_1m_out_cached": 0,
 963      "context_window": 131072,
 964      "default_max_tokens": 13107,
 965      "can_reason": false,
 966      "supports_attachments": false,
 967      "options": {}
 968    },
 969    {
 970      "id": "meta-llama/llama-3.2-3b-instruct",
 971      "name": "Meta: Llama 3.2 3B Instruct",
 972      "cost_per_1m_in": 0.024,
 973      "cost_per_1m_out": 0.04,
 974      "cost_per_1m_in_cached": 0,
 975      "cost_per_1m_out_cached": 0,
 976      "context_window": 32768,
 977      "default_max_tokens": 16000,
 978      "can_reason": false,
 979      "supports_attachments": false,
 980      "options": {}
 981    },
 982    {
 983      "id": "meta-llama/llama-3.3-70b-instruct",
 984      "name": "Meta: Llama 3.3 70B Instruct",
 985      "cost_per_1m_in": 0.1,
 986      "cost_per_1m_out": 0.32,
 987      "cost_per_1m_in_cached": 0,
 988      "cost_per_1m_out_cached": 0,
 989      "context_window": 131072,
 990      "default_max_tokens": 8192,
 991      "can_reason": false,
 992      "supports_attachments": false,
 993      "options": {}
 994    },
 995    {
 996      "id": "meta-llama/llama-3.3-70b-instruct:free",
 997      "name": "Meta: Llama 3.3 70B Instruct (free)",
 998      "cost_per_1m_in": 0,
 999      "cost_per_1m_out": 0,
1000      "cost_per_1m_in_cached": 0,
1001      "cost_per_1m_out_cached": 0,
1002      "context_window": 131072,
1003      "default_max_tokens": 13107,
1004      "can_reason": false,
1005      "supports_attachments": false,
1006      "options": {}
1007    },
1008    {
1009      "id": "meta-llama/llama-4-maverick",
1010      "name": "Meta: Llama 4 Maverick",
1011      "cost_per_1m_in": 0.27,
1012      "cost_per_1m_out": 0.85,
1013      "cost_per_1m_in_cached": 0,
1014      "cost_per_1m_out_cached": 0,
1015      "context_window": 1048576,
1016      "default_max_tokens": 104857,
1017      "can_reason": false,
1018      "supports_attachments": true,
1019      "options": {}
1020    },
1021    {
1022      "id": "meta-llama/llama-4-scout",
1023      "name": "Meta: Llama 4 Scout",
1024      "cost_per_1m_in": 0.25,
1025      "cost_per_1m_out": 0.7,
1026      "cost_per_1m_in_cached": 0,
1027      "cost_per_1m_out_cached": 0,
1028      "context_window": 1310720,
1029      "default_max_tokens": 4096,
1030      "can_reason": false,
1031      "supports_attachments": true,
1032      "options": {}
1033    },
1034    {
1035      "id": "microsoft/phi-3-medium-128k-instruct",
1036      "name": "Microsoft: Phi-3 Medium 128K Instruct",
1037      "cost_per_1m_in": 1,
1038      "cost_per_1m_out": 1,
1039      "cost_per_1m_in_cached": 0,
1040      "cost_per_1m_out_cached": 0,
1041      "context_window": 128000,
1042      "default_max_tokens": 12800,
1043      "can_reason": false,
1044      "supports_attachments": false,
1045      "options": {}
1046    },
1047    {
1048      "id": "microsoft/phi-3-mini-128k-instruct",
1049      "name": "Microsoft: Phi-3 Mini 128K Instruct",
1050      "cost_per_1m_in": 0.1,
1051      "cost_per_1m_out": 0.1,
1052      "cost_per_1m_in_cached": 0,
1053      "cost_per_1m_out_cached": 0,
1054      "context_window": 128000,
1055      "default_max_tokens": 12800,
1056      "can_reason": false,
1057      "supports_attachments": false,
1058      "options": {}
1059    },
1060    {
1061      "id": "microsoft/phi-3.5-mini-128k-instruct",
1062      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
1063      "cost_per_1m_in": 0.1,
1064      "cost_per_1m_out": 0.1,
1065      "cost_per_1m_in_cached": 0,
1066      "cost_per_1m_out_cached": 0,
1067      "context_window": 128000,
1068      "default_max_tokens": 12800,
1069      "can_reason": false,
1070      "supports_attachments": false,
1071      "options": {}
1072    },
1073    {
1074      "id": "minimax/minimax-m2",
1075      "name": "MiniMax: MiniMax M2",
1076      "cost_per_1m_in": 0.254,
1077      "cost_per_1m_out": 1.02,
1078      "cost_per_1m_in_cached": 0,
1079      "cost_per_1m_out_cached": 0.127,
1080      "context_window": 262144,
1081      "default_max_tokens": 26214,
1082      "can_reason": true,
1083      "reasoning_levels": [
1084        "low",
1085        "medium",
1086        "high"
1087      ],
1088      "default_reasoning_effort": "medium",
1089      "supports_attachments": false,
1090      "options": {}
1091    },
1092    {
1093      "id": "mistralai/mistral-large",
1094      "name": "Mistral Large",
1095      "cost_per_1m_in": 2,
1096      "cost_per_1m_out": 6,
1097      "cost_per_1m_in_cached": 0,
1098      "cost_per_1m_out_cached": 0,
1099      "context_window": 128000,
1100      "default_max_tokens": 12800,
1101      "can_reason": false,
1102      "supports_attachments": false,
1103      "options": {}
1104    },
1105    {
1106      "id": "mistralai/mistral-large-2407",
1107      "name": "Mistral Large 2407",
1108      "cost_per_1m_in": 2,
1109      "cost_per_1m_out": 6,
1110      "cost_per_1m_in_cached": 0,
1111      "cost_per_1m_out_cached": 0,
1112      "context_window": 131072,
1113      "default_max_tokens": 13107,
1114      "can_reason": false,
1115      "supports_attachments": false,
1116      "options": {}
1117    },
1118    {
1119      "id": "mistralai/mistral-large-2411",
1120      "name": "Mistral Large 2411",
1121      "cost_per_1m_in": 2,
1122      "cost_per_1m_out": 6,
1123      "cost_per_1m_in_cached": 0,
1124      "cost_per_1m_out_cached": 0,
1125      "context_window": 131072,
1126      "default_max_tokens": 13107,
1127      "can_reason": false,
1128      "supports_attachments": false,
1129      "options": {}
1130    },
1131    {
1132      "id": "mistralai/mistral-tiny",
1133      "name": "Mistral Tiny",
1134      "cost_per_1m_in": 0.25,
1135      "cost_per_1m_out": 0.25,
1136      "cost_per_1m_in_cached": 0,
1137      "cost_per_1m_out_cached": 0,
1138      "context_window": 32768,
1139      "default_max_tokens": 3276,
1140      "can_reason": false,
1141      "supports_attachments": false,
1142      "options": {}
1143    },
1144    {
1145      "id": "mistralai/codestral-2508",
1146      "name": "Mistral: Codestral 2508",
1147      "cost_per_1m_in": 0.3,
1148      "cost_per_1m_out": 0.9,
1149      "cost_per_1m_in_cached": 0,
1150      "cost_per_1m_out_cached": 0,
1151      "context_window": 256000,
1152      "default_max_tokens": 25600,
1153      "can_reason": false,
1154      "supports_attachments": false,
1155      "options": {}
1156    },
1157    {
1158      "id": "mistralai/devstral-2512",
1159      "name": "Mistral: Devstral 2 2512",
1160      "cost_per_1m_in": 0.05,
1161      "cost_per_1m_out": 0.22,
1162      "cost_per_1m_in_cached": 0,
1163      "cost_per_1m_out_cached": 0,
1164      "context_window": 262144,
1165      "default_max_tokens": 32768,
1166      "can_reason": false,
1167      "supports_attachments": false,
1168      "options": {}
1169    },
1170    {
1171      "id": "mistralai/devstral-2512:free",
1172      "name": "Mistral: Devstral 2 2512 (free)",
1173      "cost_per_1m_in": 0,
1174      "cost_per_1m_out": 0,
1175      "cost_per_1m_in_cached": 0,
1176      "cost_per_1m_out_cached": 0,
1177      "context_window": 262144,
1178      "default_max_tokens": 26214,
1179      "can_reason": false,
1180      "supports_attachments": false,
1181      "options": {}
1182    },
1183    {
1184      "id": "mistralai/devstral-medium",
1185      "name": "Mistral: Devstral Medium",
1186      "cost_per_1m_in": 0.4,
1187      "cost_per_1m_out": 2,
1188      "cost_per_1m_in_cached": 0,
1189      "cost_per_1m_out_cached": 0,
1190      "context_window": 131072,
1191      "default_max_tokens": 13107,
1192      "can_reason": false,
1193      "supports_attachments": false,
1194      "options": {}
1195    },
1196    {
1197      "id": "mistralai/devstral-small",
1198      "name": "Mistral: Devstral Small 1.1",
1199      "cost_per_1m_in": 0.1,
1200      "cost_per_1m_out": 0.3,
1201      "cost_per_1m_in_cached": 0,
1202      "cost_per_1m_out_cached": 0,
1203      "context_window": 131072,
1204      "default_max_tokens": 13107,
1205      "can_reason": false,
1206      "supports_attachments": false,
1207      "options": {}
1208    },
1209    {
1210      "id": "mistralai/ministral-14b-2512",
1211      "name": "Mistral: Ministral 3 14B 2512",
1212      "cost_per_1m_in": 0.2,
1213      "cost_per_1m_out": 0.2,
1214      "cost_per_1m_in_cached": 0,
1215      "cost_per_1m_out_cached": 0,
1216      "context_window": 262144,
1217      "default_max_tokens": 26214,
1218      "can_reason": false,
1219      "supports_attachments": true,
1220      "options": {}
1221    },
1222    {
1223      "id": "mistralai/ministral-3b-2512",
1224      "name": "Mistral: Ministral 3 3B 2512",
1225      "cost_per_1m_in": 0.1,
1226      "cost_per_1m_out": 0.1,
1227      "cost_per_1m_in_cached": 0,
1228      "cost_per_1m_out_cached": 0,
1229      "context_window": 131072,
1230      "default_max_tokens": 13107,
1231      "can_reason": false,
1232      "supports_attachments": true,
1233      "options": {}
1234    },
1235    {
1236      "id": "mistralai/ministral-8b-2512",
1237      "name": "Mistral: Ministral 3 8B 2512",
1238      "cost_per_1m_in": 0.15,
1239      "cost_per_1m_out": 0.15,
1240      "cost_per_1m_in_cached": 0,
1241      "cost_per_1m_out_cached": 0,
1242      "context_window": 262144,
1243      "default_max_tokens": 26214,
1244      "can_reason": false,
1245      "supports_attachments": true,
1246      "options": {}
1247    },
1248    {
1249      "id": "mistralai/ministral-3b",
1250      "name": "Mistral: Ministral 3B",
1251      "cost_per_1m_in": 0.04,
1252      "cost_per_1m_out": 0.04,
1253      "cost_per_1m_in_cached": 0,
1254      "cost_per_1m_out_cached": 0,
1255      "context_window": 131072,
1256      "default_max_tokens": 13107,
1257      "can_reason": false,
1258      "supports_attachments": false,
1259      "options": {}
1260    },
1261    {
1262      "id": "mistralai/ministral-8b",
1263      "name": "Mistral: Ministral 8B",
1264      "cost_per_1m_in": 0.1,
1265      "cost_per_1m_out": 0.1,
1266      "cost_per_1m_in_cached": 0,
1267      "cost_per_1m_out_cached": 0,
1268      "context_window": 131072,
1269      "default_max_tokens": 13107,
1270      "can_reason": false,
1271      "supports_attachments": false,
1272      "options": {}
1273    },
1274    {
1275      "id": "mistralai/mistral-7b-instruct",
1276      "name": "Mistral: Mistral 7B Instruct",
1277      "cost_per_1m_in": 0.028,
1278      "cost_per_1m_out": 0.054,
1279      "cost_per_1m_in_cached": 0,
1280      "cost_per_1m_out_cached": 0,
1281      "context_window": 32768,
1282      "default_max_tokens": 8192,
1283      "can_reason": false,
1284      "supports_attachments": false,
1285      "options": {}
1286    },
1287    {
1288      "id": "mistralai/mistral-7b-instruct:free",
1289      "name": "Mistral: Mistral 7B Instruct (free)",
1290      "cost_per_1m_in": 0,
1291      "cost_per_1m_out": 0,
1292      "cost_per_1m_in_cached": 0,
1293      "cost_per_1m_out_cached": 0,
1294      "context_window": 32768,
1295      "default_max_tokens": 8192,
1296      "can_reason": false,
1297      "supports_attachments": false,
1298      "options": {}
1299    },
1300    {
1301      "id": "mistralai/mistral-large-2512",
1302      "name": "Mistral: Mistral Large 3 2512",
1303      "cost_per_1m_in": 0.5,
1304      "cost_per_1m_out": 1.5,
1305      "cost_per_1m_in_cached": 0,
1306      "cost_per_1m_out_cached": 0,
1307      "context_window": 262144,
1308      "default_max_tokens": 26214,
1309      "can_reason": false,
1310      "supports_attachments": true,
1311      "options": {}
1312    },
1313    {
1314      "id": "mistralai/mistral-medium-3",
1315      "name": "Mistral: Mistral Medium 3",
1316      "cost_per_1m_in": 0.4,
1317      "cost_per_1m_out": 2,
1318      "cost_per_1m_in_cached": 0,
1319      "cost_per_1m_out_cached": 0,
1320      "context_window": 131072,
1321      "default_max_tokens": 13107,
1322      "can_reason": false,
1323      "supports_attachments": true,
1324      "options": {}
1325    },
1326    {
1327      "id": "mistralai/mistral-medium-3.1",
1328      "name": "Mistral: Mistral Medium 3.1",
1329      "cost_per_1m_in": 0.4,
1330      "cost_per_1m_out": 2,
1331      "cost_per_1m_in_cached": 0,
1332      "cost_per_1m_out_cached": 0,
1333      "context_window": 131072,
1334      "default_max_tokens": 13107,
1335      "can_reason": false,
1336      "supports_attachments": true,
1337      "options": {}
1338    },
1339    {
1340      "id": "mistralai/mistral-nemo",
1341      "name": "Mistral: Mistral Nemo",
1342      "cost_per_1m_in": 0.15,
1343      "cost_per_1m_out": 0.15,
1344      "cost_per_1m_in_cached": 0,
1345      "cost_per_1m_out_cached": 0,
1346      "context_window": 131072,
1347      "default_max_tokens": 13107,
1348      "can_reason": false,
1349      "supports_attachments": false,
1350      "options": {}
1351    },
1352    {
1353      "id": "mistralai/mistral-small-24b-instruct-2501",
1354      "name": "Mistral: Mistral Small 3",
1355      "cost_per_1m_in": 0.1,
1356      "cost_per_1m_out": 0.3,
1357      "cost_per_1m_in_cached": 0,
1358      "cost_per_1m_out_cached": 0,
1359      "context_window": 32768,
1360      "default_max_tokens": 1024,
1361      "can_reason": false,
1362      "supports_attachments": false,
1363      "options": {}
1364    },
1365    {
1366      "id": "mistralai/mistral-small-3.1-24b-instruct",
1367      "name": "Mistral: Mistral Small 3.1 24B",
1368      "cost_per_1m_in": 0.03,
1369      "cost_per_1m_out": 0.11,
1370      "cost_per_1m_in_cached": 0,
1371      "cost_per_1m_out_cached": 0,
1372      "context_window": 131072,
1373      "default_max_tokens": 65536,
1374      "can_reason": false,
1375      "supports_attachments": true,
1376      "options": {}
1377    },
1378    {
1379      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
1380      "name": "Mistral: Mistral Small 3.1 24B (free)",
1381      "cost_per_1m_in": 0,
1382      "cost_per_1m_out": 0,
1383      "cost_per_1m_in_cached": 0,
1384      "cost_per_1m_out_cached": 0,
1385      "context_window": 128000,
1386      "default_max_tokens": 12800,
1387      "can_reason": false,
1388      "supports_attachments": true,
1389      "options": {}
1390    },
1391    {
1392      "id": "mistralai/mistral-small-3.2-24b-instruct",
1393      "name": "Mistral: Mistral Small 3.2 24B",
1394      "cost_per_1m_in": 0.06,
1395      "cost_per_1m_out": 0.18,
1396      "cost_per_1m_in_cached": 0,
1397      "cost_per_1m_out_cached": 0,
1398      "context_window": 131072,
1399      "default_max_tokens": 65536,
1400      "can_reason": false,
1401      "supports_attachments": true,
1402      "options": {}
1403    },
1404    {
1405      "id": "mistralai/mistral-small-creative",
1406      "name": "Mistral: Mistral Small Creative",
1407      "cost_per_1m_in": 0.1,
1408      "cost_per_1m_out": 0.3,
1409      "cost_per_1m_in_cached": 0,
1410      "cost_per_1m_out_cached": 0,
1411      "context_window": 32768,
1412      "default_max_tokens": 3276,
1413      "can_reason": false,
1414      "supports_attachments": false,
1415      "options": {}
1416    },
1417    {
1418      "id": "mistralai/mixtral-8x22b-instruct",
1419      "name": "Mistral: Mixtral 8x22B Instruct",
1420      "cost_per_1m_in": 2,
1421      "cost_per_1m_out": 6,
1422      "cost_per_1m_in_cached": 0,
1423      "cost_per_1m_out_cached": 0,
1424      "context_window": 65536,
1425      "default_max_tokens": 6553,
1426      "can_reason": false,
1427      "supports_attachments": false,
1428      "options": {}
1429    },
1430    {
1431      "id": "mistralai/mixtral-8x7b-instruct",
1432      "name": "Mistral: Mixtral 8x7B Instruct",
1433      "cost_per_1m_in": 0.54,
1434      "cost_per_1m_out": 0.54,
1435      "cost_per_1m_in_cached": 0,
1436      "cost_per_1m_out_cached": 0,
1437      "context_window": 32768,
1438      "default_max_tokens": 8192,
1439      "can_reason": false,
1440      "supports_attachments": false,
1441      "options": {}
1442    },
1443    {
1444      "id": "mistralai/pixtral-large-2411",
1445      "name": "Mistral: Pixtral Large 2411",
1446      "cost_per_1m_in": 2,
1447      "cost_per_1m_out": 6,
1448      "cost_per_1m_in_cached": 0,
1449      "cost_per_1m_out_cached": 0,
1450      "context_window": 131072,
1451      "default_max_tokens": 13107,
1452      "can_reason": false,
1453      "supports_attachments": true,
1454      "options": {}
1455    },
1456    {
1457      "id": "mistralai/mistral-saba",
1458      "name": "Mistral: Saba",
1459      "cost_per_1m_in": 0.2,
1460      "cost_per_1m_out": 0.6,
1461      "cost_per_1m_in_cached": 0,
1462      "cost_per_1m_out_cached": 0,
1463      "context_window": 32768,
1464      "default_max_tokens": 3276,
1465      "can_reason": false,
1466      "supports_attachments": false,
1467      "options": {}
1468    },
1469    {
1470      "id": "mistralai/voxtral-small-24b-2507",
1471      "name": "Mistral: Voxtral Small 24B 2507",
1472      "cost_per_1m_in": 0.1,
1473      "cost_per_1m_out": 0.3,
1474      "cost_per_1m_in_cached": 0,
1475      "cost_per_1m_out_cached": 0,
1476      "context_window": 32000,
1477      "default_max_tokens": 3200,
1478      "can_reason": false,
1479      "supports_attachments": false,
1480      "options": {}
1481    },
1482    {
1483      "id": "moonshotai/kimi-k2",
1484      "name": "MoonshotAI: Kimi K2 0711",
1485      "cost_per_1m_in": 0.456,
1486      "cost_per_1m_out": 1.84,
1487      "cost_per_1m_in_cached": 0,
1488      "cost_per_1m_out_cached": 0,
1489      "context_window": 131072,
1490      "default_max_tokens": 65536,
1491      "can_reason": false,
1492      "supports_attachments": false,
1493      "options": {}
1494    },
1495    {
1496      "id": "moonshotai/kimi-k2-0905",
1497      "name": "MoonshotAI: Kimi K2 0905",
1498      "cost_per_1m_in": 0.6,
1499      "cost_per_1m_out": 2.5,
1500      "cost_per_1m_in_cached": 0,
1501      "cost_per_1m_out_cached": 0,
1502      "context_window": 262144,
1503      "default_max_tokens": 26214,
1504      "can_reason": false,
1505      "supports_attachments": false,
1506      "options": {}
1507    },
1508    {
1509      "id": "moonshotai/kimi-k2-0905:exacto",
1510      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
1511      "cost_per_1m_in": 1,
1512      "cost_per_1m_out": 3,
1513      "cost_per_1m_in_cached": 0,
1514      "cost_per_1m_out_cached": 0,
1515      "context_window": 262144,
1516      "default_max_tokens": 8192,
1517      "can_reason": false,
1518      "supports_attachments": false,
1519      "options": {}
1520    },
1521    {
1522      "id": "moonshotai/kimi-k2-thinking",
1523      "name": "MoonshotAI: Kimi K2 Thinking",
1524      "cost_per_1m_in": 0.4,
1525      "cost_per_1m_out": 1.75,
1526      "cost_per_1m_in_cached": 0,
1527      "cost_per_1m_out_cached": 0,
1528      "context_window": 262144,
1529      "default_max_tokens": 32767,
1530      "can_reason": true,
1531      "reasoning_levels": [
1532        "low",
1533        "medium",
1534        "high"
1535      ],
1536      "default_reasoning_effort": "medium",
1537      "supports_attachments": false,
1538      "options": {}
1539    },
1540    {
1541      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1542      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1543      "cost_per_1m_in": 1.2,
1544      "cost_per_1m_out": 1.2,
1545      "cost_per_1m_in_cached": 0,
1546      "cost_per_1m_out_cached": 0,
1547      "context_window": 131072,
1548      "default_max_tokens": 8192,
1549      "can_reason": false,
1550      "supports_attachments": false,
1551      "options": {}
1552    },
1553    {
1554      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
1555      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
1556      "cost_per_1m_in": 0.1,
1557      "cost_per_1m_out": 0.4,
1558      "cost_per_1m_in_cached": 0,
1559      "cost_per_1m_out_cached": 0,
1560      "context_window": 131072,
1561      "default_max_tokens": 13107,
1562      "can_reason": true,
1563      "reasoning_levels": [
1564        "low",
1565        "medium",
1566        "high"
1567      ],
1568      "default_reasoning_effort": "medium",
1569      "supports_attachments": false,
1570      "options": {}
1571    },
1572    {
1573      "id": "nvidia/nemotron-3-nano-30b-a3b",
1574      "name": "NVIDIA: Nemotron 3 Nano 30B A3B",
1575      "cost_per_1m_in": 0.06,
1576      "cost_per_1m_out": 0.24,
1577      "cost_per_1m_in_cached": 0,
1578      "cost_per_1m_out_cached": 0,
1579      "context_window": 262144,
1580      "default_max_tokens": 26214,
1581      "can_reason": true,
1582      "reasoning_levels": [
1583        "low",
1584        "medium",
1585        "high"
1586      ],
1587      "default_reasoning_effort": "medium",
1588      "supports_attachments": false,
1589      "options": {}
1590    },
1591    {
1592      "id": "nvidia/nemotron-3-nano-30b-a3b:free",
1593      "name": "NVIDIA: Nemotron 3 Nano 30B A3B (free)",
1594      "cost_per_1m_in": 0,
1595      "cost_per_1m_out": 0,
1596      "cost_per_1m_in_cached": 0,
1597      "cost_per_1m_out_cached": 0,
1598      "context_window": 256000,
1599      "default_max_tokens": 25600,
1600      "can_reason": true,
1601      "reasoning_levels": [
1602        "low",
1603        "medium",
1604        "high"
1605      ],
1606      "default_reasoning_effort": "medium",
1607      "supports_attachments": false,
1608      "options": {}
1609    },
1610    {
1611      "id": "nvidia/nemotron-nano-12b-v2-vl:free",
1612      "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
1613      "cost_per_1m_in": 0,
1614      "cost_per_1m_out": 0,
1615      "cost_per_1m_in_cached": 0,
1616      "cost_per_1m_out_cached": 0,
1617      "context_window": 128000,
1618      "default_max_tokens": 64000,
1619      "can_reason": true,
1620      "reasoning_levels": [
1621        "low",
1622        "medium",
1623        "high"
1624      ],
1625      "default_reasoning_effort": "medium",
1626      "supports_attachments": true,
1627      "options": {}
1628    },
1629    {
1630      "id": "nvidia/nemotron-nano-9b-v2",
1631      "name": "NVIDIA: Nemotron Nano 9B V2",
1632      "cost_per_1m_in": 0.04,
1633      "cost_per_1m_out": 0.16,
1634      "cost_per_1m_in_cached": 0,
1635      "cost_per_1m_out_cached": 0,
1636      "context_window": 131072,
1637      "default_max_tokens": 13107,
1638      "can_reason": true,
1639      "reasoning_levels": [
1640        "low",
1641        "medium",
1642        "high"
1643      ],
1644      "default_reasoning_effort": "medium",
1645      "supports_attachments": false,
1646      "options": {}
1647    },
1648    {
1649      "id": "nvidia/nemotron-nano-9b-v2:free",
1650      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
1651      "cost_per_1m_in": 0,
1652      "cost_per_1m_out": 0,
1653      "cost_per_1m_in_cached": 0,
1654      "cost_per_1m_out_cached": 0,
1655      "context_window": 128000,
1656      "default_max_tokens": 12800,
1657      "can_reason": true,
1658      "reasoning_levels": [
1659        "low",
1660        "medium",
1661        "high"
1662      ],
1663      "default_reasoning_effort": "medium",
1664      "supports_attachments": false,
1665      "options": {}
1666    },
1667    {
1668      "id": "nex-agi/deepseek-v3.1-nex-n1:free",
1669      "name": "Nex AGI: DeepSeek V3.1 Nex N1 (free)",
1670      "cost_per_1m_in": 0,
1671      "cost_per_1m_out": 0,
1672      "cost_per_1m_in_cached": 0,
1673      "cost_per_1m_out_cached": 0,
1674      "context_window": 131072,
1675      "default_max_tokens": 81920,
1676      "can_reason": false,
1677      "supports_attachments": false,
1678      "options": {}
1679    },
1680    {
1681      "id": "nousresearch/deephermes-3-mistral-24b-preview",
1682      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
1683      "cost_per_1m_in": 0.02,
1684      "cost_per_1m_out": 0.1,
1685      "cost_per_1m_in_cached": 0,
1686      "cost_per_1m_out_cached": 0,
1687      "context_window": 32768,
1688      "default_max_tokens": 16384,
1689      "can_reason": true,
1690      "reasoning_levels": [
1691        "low",
1692        "medium",
1693        "high"
1694      ],
1695      "default_reasoning_effort": "medium",
1696      "supports_attachments": false,
1697      "options": {}
1698    },
1699    {
1700      "id": "nousresearch/hermes-4-70b",
1701      "name": "Nous: Hermes 4 70B",
1702      "cost_per_1m_in": 0.11,
1703      "cost_per_1m_out": 0.38,
1704      "cost_per_1m_in_cached": 0,
1705      "cost_per_1m_out_cached": 0,
1706      "context_window": 131072,
1707      "default_max_tokens": 65536,
1708      "can_reason": true,
1709      "reasoning_levels": [
1710        "low",
1711        "medium",
1712        "high"
1713      ],
1714      "default_reasoning_effort": "medium",
1715      "supports_attachments": false,
1716      "options": {}
1717    },
1718    {
1719      "id": "openai/codex-mini",
1720      "name": "OpenAI: Codex Mini",
1721      "cost_per_1m_in": 1.5,
1722      "cost_per_1m_out": 6,
1723      "cost_per_1m_in_cached": 0,
1724      "cost_per_1m_out_cached": 0.375,
1725      "context_window": 200000,
1726      "default_max_tokens": 50000,
1727      "can_reason": true,
1728      "reasoning_levels": [
1729        "low",
1730        "medium",
1731        "high"
1732      ],
1733      "default_reasoning_effort": "medium",
1734      "supports_attachments": true,
1735      "options": {}
1736    },
1737    {
1738      "id": "openai/gpt-4-turbo",
1739      "name": "OpenAI: GPT-4 Turbo",
1740      "cost_per_1m_in": 10,
1741      "cost_per_1m_out": 30,
1742      "cost_per_1m_in_cached": 0,
1743      "cost_per_1m_out_cached": 0,
1744      "context_window": 128000,
1745      "default_max_tokens": 2048,
1746      "can_reason": false,
1747      "supports_attachments": true,
1748      "options": {}
1749    },
1750    {
1751      "id": "openai/gpt-4-1106-preview",
1752      "name": "OpenAI: GPT-4 Turbo (older v1106)",
1753      "cost_per_1m_in": 10,
1754      "cost_per_1m_out": 30,
1755      "cost_per_1m_in_cached": 0,
1756      "cost_per_1m_out_cached": 0,
1757      "context_window": 128000,
1758      "default_max_tokens": 2048,
1759      "can_reason": false,
1760      "supports_attachments": false,
1761      "options": {}
1762    },
1763    {
1764      "id": "openai/gpt-4-turbo-preview",
1765      "name": "OpenAI: GPT-4 Turbo Preview",
1766      "cost_per_1m_in": 10,
1767      "cost_per_1m_out": 30,
1768      "cost_per_1m_in_cached": 0,
1769      "cost_per_1m_out_cached": 0,
1770      "context_window": 128000,
1771      "default_max_tokens": 2048,
1772      "can_reason": false,
1773      "supports_attachments": false,
1774      "options": {}
1775    },
1776    {
1777      "id": "openai/gpt-4.1",
1778      "name": "OpenAI: GPT-4.1",
1779      "cost_per_1m_in": 2,
1780      "cost_per_1m_out": 8,
1781      "cost_per_1m_in_cached": 0,
1782      "cost_per_1m_out_cached": 0.5,
1783      "context_window": 1047576,
1784      "default_max_tokens": 104757,
1785      "can_reason": false,
1786      "supports_attachments": true,
1787      "options": {}
1788    },
1789    {
1790      "id": "openai/gpt-4.1-mini",
1791      "name": "OpenAI: GPT-4.1 Mini",
1792      "cost_per_1m_in": 0.4,
1793      "cost_per_1m_out": 1.6,
1794      "cost_per_1m_in_cached": 0,
1795      "cost_per_1m_out_cached": 0.1,
1796      "context_window": 1047576,
1797      "default_max_tokens": 16384,
1798      "can_reason": false,
1799      "supports_attachments": true,
1800      "options": {}
1801    },
1802    {
1803      "id": "openai/gpt-4.1-nano",
1804      "name": "OpenAI: GPT-4.1 Nano",
1805      "cost_per_1m_in": 0.1,
1806      "cost_per_1m_out": 0.4,
1807      "cost_per_1m_in_cached": 0,
1808      "cost_per_1m_out_cached": 0.025,
1809      "context_window": 1047576,
1810      "default_max_tokens": 16384,
1811      "can_reason": false,
1812      "supports_attachments": true,
1813      "options": {}
1814    },
1815    {
1816      "id": "openai/gpt-4o",
1817      "name": "OpenAI: GPT-4o",
1818      "cost_per_1m_in": 2.5,
1819      "cost_per_1m_out": 10,
1820      "cost_per_1m_in_cached": 0,
1821      "cost_per_1m_out_cached": 0,
1822      "context_window": 128000,
1823      "default_max_tokens": 8192,
1824      "can_reason": false,
1825      "supports_attachments": true,
1826      "options": {}
1827    },
1828    {
1829      "id": "openai/gpt-4o-2024-05-13",
1830      "name": "OpenAI: GPT-4o (2024-05-13)",
1831      "cost_per_1m_in": 5,
1832      "cost_per_1m_out": 15,
1833      "cost_per_1m_in_cached": 0,
1834      "cost_per_1m_out_cached": 0,
1835      "context_window": 128000,
1836      "default_max_tokens": 2048,
1837      "can_reason": false,
1838      "supports_attachments": true,
1839      "options": {}
1840    },
1841    {
1842      "id": "openai/gpt-4o-2024-08-06",
1843      "name": "OpenAI: GPT-4o (2024-08-06)",
1844      "cost_per_1m_in": 2.5,
1845      "cost_per_1m_out": 10,
1846      "cost_per_1m_in_cached": 0,
1847      "cost_per_1m_out_cached": 1.25,
1848      "context_window": 128000,
1849      "default_max_tokens": 8192,
1850      "can_reason": false,
1851      "supports_attachments": true,
1852      "options": {}
1853    },
1854    {
1855      "id": "openai/gpt-4o-2024-11-20",
1856      "name": "OpenAI: GPT-4o (2024-11-20)",
1857      "cost_per_1m_in": 2.5,
1858      "cost_per_1m_out": 10,
1859      "cost_per_1m_in_cached": 0,
1860      "cost_per_1m_out_cached": 1.25,
1861      "context_window": 128000,
1862      "default_max_tokens": 8192,
1863      "can_reason": false,
1864      "supports_attachments": true,
1865      "options": {}
1866    },
1867    {
1868      "id": "openai/gpt-4o:extended",
1869      "name": "OpenAI: GPT-4o (extended)",
1870      "cost_per_1m_in": 6,
1871      "cost_per_1m_out": 18,
1872      "cost_per_1m_in_cached": 0,
1873      "cost_per_1m_out_cached": 0,
1874      "context_window": 128000,
1875      "default_max_tokens": 32000,
1876      "can_reason": false,
1877      "supports_attachments": true,
1878      "options": {}
1879    },
1880    {
1881      "id": "openai/gpt-4o-audio-preview",
1882      "name": "OpenAI: GPT-4o Audio",
1883      "cost_per_1m_in": 2.5,
1884      "cost_per_1m_out": 10,
1885      "cost_per_1m_in_cached": 0,
1886      "cost_per_1m_out_cached": 0,
1887      "context_window": 128000,
1888      "default_max_tokens": 8192,
1889      "can_reason": false,
1890      "supports_attachments": false,
1891      "options": {}
1892    },
1893    {
1894      "id": "openai/gpt-4o-mini",
1895      "name": "OpenAI: GPT-4o-mini",
1896      "cost_per_1m_in": 0.15,
1897      "cost_per_1m_out": 0.6,
1898      "cost_per_1m_in_cached": 0,
1899      "cost_per_1m_out_cached": 0.075,
1900      "context_window": 128000,
1901      "default_max_tokens": 8192,
1902      "can_reason": false,
1903      "supports_attachments": true,
1904      "options": {}
1905    },
1906    {
1907      "id": "openai/gpt-4o-mini-2024-07-18",
1908      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1909      "cost_per_1m_in": 0.15,
1910      "cost_per_1m_out": 0.6,
1911      "cost_per_1m_in_cached": 0,
1912      "cost_per_1m_out_cached": 0.075,
1913      "context_window": 128000,
1914      "default_max_tokens": 8192,
1915      "can_reason": false,
1916      "supports_attachments": true,
1917      "options": {}
1918    },
1919    {
1920      "id": "openai/gpt-5",
1921      "name": "OpenAI: GPT-5",
1922      "cost_per_1m_in": 1.25,
1923      "cost_per_1m_out": 10,
1924      "cost_per_1m_in_cached": 0,
1925      "cost_per_1m_out_cached": 0.125,
1926      "context_window": 400000,
1927      "default_max_tokens": 64000,
1928      "can_reason": true,
1929      "reasoning_levels": [
1930        "low",
1931        "medium",
1932        "high"
1933      ],
1934      "default_reasoning_effort": "medium",
1935      "supports_attachments": true,
1936      "options": {}
1937    },
1938    {
1939      "id": "openai/gpt-5-codex",
1940      "name": "OpenAI: GPT-5 Codex",
1941      "cost_per_1m_in": 1.25,
1942      "cost_per_1m_out": 10,
1943      "cost_per_1m_in_cached": 0,
1944      "cost_per_1m_out_cached": 0.125,
1945      "context_window": 400000,
1946      "default_max_tokens": 64000,
1947      "can_reason": true,
1948      "reasoning_levels": [
1949        "low",
1950        "medium",
1951        "high"
1952      ],
1953      "default_reasoning_effort": "medium",
1954      "supports_attachments": true,
1955      "options": {}
1956    },
1957    {
1958      "id": "openai/gpt-5-image",
1959      "name": "OpenAI: GPT-5 Image",
1960      "cost_per_1m_in": 10,
1961      "cost_per_1m_out": 10,
1962      "cost_per_1m_in_cached": 0,
1963      "cost_per_1m_out_cached": 1.25,
1964      "context_window": 400000,
1965      "default_max_tokens": 64000,
1966      "can_reason": true,
1967      "reasoning_levels": [
1968        "low",
1969        "medium",
1970        "high"
1971      ],
1972      "default_reasoning_effort": "medium",
1973      "supports_attachments": true,
1974      "options": {}
1975    },
1976    {
1977      "id": "openai/gpt-5-image-mini",
1978      "name": "OpenAI: GPT-5 Image Mini",
1979      "cost_per_1m_in": 2.5,
1980      "cost_per_1m_out": 2,
1981      "cost_per_1m_in_cached": 0,
1982      "cost_per_1m_out_cached": 0.25,
1983      "context_window": 400000,
1984      "default_max_tokens": 64000,
1985      "can_reason": true,
1986      "reasoning_levels": [
1987        "low",
1988        "medium",
1989        "high"
1990      ],
1991      "default_reasoning_effort": "medium",
1992      "supports_attachments": true,
1993      "options": {}
1994    },
1995    {
1996      "id": "openai/gpt-5-mini",
1997      "name": "OpenAI: GPT-5 Mini",
1998      "cost_per_1m_in": 0.25,
1999      "cost_per_1m_out": 2,
2000      "cost_per_1m_in_cached": 0,
2001      "cost_per_1m_out_cached": 0.03,
2002      "context_window": 400000,
2003      "default_max_tokens": 40000,
2004      "can_reason": true,
2005      "reasoning_levels": [
2006        "low",
2007        "medium",
2008        "high"
2009      ],
2010      "default_reasoning_effort": "medium",
2011      "supports_attachments": true,
2012      "options": {}
2013    },
2014    {
2015      "id": "openai/gpt-5-nano",
2016      "name": "OpenAI: GPT-5 Nano",
2017      "cost_per_1m_in": 0.05,
2018      "cost_per_1m_out": 0.4,
2019      "cost_per_1m_in_cached": 0,
2020      "cost_per_1m_out_cached": 0.01,
2021      "context_window": 400000,
2022      "default_max_tokens": 40000,
2023      "can_reason": true,
2024      "reasoning_levels": [
2025        "low",
2026        "medium",
2027        "high"
2028      ],
2029      "default_reasoning_effort": "medium",
2030      "supports_attachments": true,
2031      "options": {}
2032    },
2033    {
2034      "id": "openai/gpt-5-pro",
2035      "name": "OpenAI: GPT-5 Pro",
2036      "cost_per_1m_in": 15,
2037      "cost_per_1m_out": 120,
2038      "cost_per_1m_in_cached": 0,
2039      "cost_per_1m_out_cached": 0,
2040      "context_window": 400000,
2041      "default_max_tokens": 64000,
2042      "can_reason": true,
2043      "reasoning_levels": [
2044        "low",
2045        "medium",
2046        "high"
2047      ],
2048      "default_reasoning_effort": "medium",
2049      "supports_attachments": true,
2050      "options": {}
2051    },
2052    {
2053      "id": "openai/gpt-5.1",
2054      "name": "OpenAI: GPT-5.1",
2055      "cost_per_1m_in": 1.25,
2056      "cost_per_1m_out": 10,
2057      "cost_per_1m_in_cached": 0,
2058      "cost_per_1m_out_cached": 0.125,
2059      "context_window": 400000,
2060      "default_max_tokens": 64000,
2061      "can_reason": true,
2062      "reasoning_levels": [
2063        "low",
2064        "medium",
2065        "high"
2066      ],
2067      "default_reasoning_effort": "medium",
2068      "supports_attachments": true,
2069      "options": {}
2070    },
2071    {
2072      "id": "openai/gpt-5.1-chat",
2073      "name": "OpenAI: GPT-5.1 Chat",
2074      "cost_per_1m_in": 1.25,
2075      "cost_per_1m_out": 10,
2076      "cost_per_1m_in_cached": 0,
2077      "cost_per_1m_out_cached": 0.125,
2078      "context_window": 128000,
2079      "default_max_tokens": 8192,
2080      "can_reason": false,
2081      "supports_attachments": true,
2082      "options": {}
2083    },
2084    {
2085      "id": "openai/gpt-5.1-codex",
2086      "name": "OpenAI: GPT-5.1-Codex",
2087      "cost_per_1m_in": 1.25,
2088      "cost_per_1m_out": 10,
2089      "cost_per_1m_in_cached": 0,
2090      "cost_per_1m_out_cached": 0.125,
2091      "context_window": 400000,
2092      "default_max_tokens": 64000,
2093      "can_reason": true,
2094      "reasoning_levels": [
2095        "low",
2096        "medium",
2097        "high"
2098      ],
2099      "default_reasoning_effort": "medium",
2100      "supports_attachments": true,
2101      "options": {}
2102    },
2103    {
2104      "id": "openai/gpt-5.1-codex-max",
2105      "name": "OpenAI: GPT-5.1-Codex-Max",
2106      "cost_per_1m_in": 1.25,
2107      "cost_per_1m_out": 10,
2108      "cost_per_1m_in_cached": 0,
2109      "cost_per_1m_out_cached": 0.125,
2110      "context_window": 400000,
2111      "default_max_tokens": 64000,
2112      "can_reason": true,
2113      "reasoning_levels": [
2114        "low",
2115        "medium",
2116        "high"
2117      ],
2118      "default_reasoning_effort": "medium",
2119      "supports_attachments": true,
2120      "options": {}
2121    },
2122    {
2123      "id": "openai/gpt-5.1-codex-mini",
2124      "name": "OpenAI: GPT-5.1-Codex-Mini",
2125      "cost_per_1m_in": 0.25,
2126      "cost_per_1m_out": 2,
2127      "cost_per_1m_in_cached": 0,
2128      "cost_per_1m_out_cached": 0.025,
2129      "context_window": 400000,
2130      "default_max_tokens": 50000,
2131      "can_reason": true,
2132      "reasoning_levels": [
2133        "low",
2134        "medium",
2135        "high"
2136      ],
2137      "default_reasoning_effort": "medium",
2138      "supports_attachments": true,
2139      "options": {}
2140    },
2141    {
2142      "id": "openai/gpt-5.2",
2143      "name": "OpenAI: GPT-5.2",
2144      "cost_per_1m_in": 1.75,
2145      "cost_per_1m_out": 14,
2146      "cost_per_1m_in_cached": 0,
2147      "cost_per_1m_out_cached": 0.175,
2148      "context_window": 400000,
2149      "default_max_tokens": 64000,
2150      "can_reason": true,
2151      "reasoning_levels": [
2152        "low",
2153        "medium",
2154        "high"
2155      ],
2156      "default_reasoning_effort": "medium",
2157      "supports_attachments": true,
2158      "options": {}
2159    },
2160    {
2161      "id": "openai/gpt-5.2-chat",
2162      "name": "OpenAI: GPT-5.2 Chat",
2163      "cost_per_1m_in": 1.75,
2164      "cost_per_1m_out": 14,
2165      "cost_per_1m_in_cached": 0,
2166      "cost_per_1m_out_cached": 0.175,
2167      "context_window": 128000,
2168      "default_max_tokens": 8192,
2169      "can_reason": false,
2170      "supports_attachments": true,
2171      "options": {}
2172    },
2173    {
2174      "id": "openai/gpt-5.2-pro",
2175      "name": "OpenAI: GPT-5.2 Pro",
2176      "cost_per_1m_in": 21,
2177      "cost_per_1m_out": 168,
2178      "cost_per_1m_in_cached": 0,
2179      "cost_per_1m_out_cached": 0,
2180      "context_window": 400000,
2181      "default_max_tokens": 64000,
2182      "can_reason": true,
2183      "reasoning_levels": [
2184        "low",
2185        "medium",
2186        "high"
2187      ],
2188      "default_reasoning_effort": "medium",
2189      "supports_attachments": true,
2190      "options": {}
2191    },
2192    {
2193      "id": "openai/gpt-oss-120b",
2194      "name": "OpenAI: gpt-oss-120b",
2195      "cost_per_1m_in": 0.04,
2196      "cost_per_1m_out": 0.25,
2197      "cost_per_1m_in_cached": 0,
2198      "cost_per_1m_out_cached": 0,
2199      "context_window": 131072,
2200      "default_max_tokens": 32768,
2201      "can_reason": true,
2202      "reasoning_levels": [
2203        "low",
2204        "medium",
2205        "high"
2206      ],
2207      "default_reasoning_effort": "medium",
2208      "supports_attachments": false,
2209      "options": {}
2210    },
2211    {
2212      "id": "openai/gpt-oss-120b:exacto",
2213      "name": "OpenAI: gpt-oss-120b (exacto)",
2214      "cost_per_1m_in": 0.15,
2215      "cost_per_1m_out": 0.6,
2216      "cost_per_1m_in_cached": 0,
2217      "cost_per_1m_out_cached": 0,
2218      "context_window": 131072,
2219      "default_max_tokens": 32768,
2220      "can_reason": true,
2221      "reasoning_levels": [
2222        "low",
2223        "medium",
2224        "high"
2225      ],
2226      "default_reasoning_effort": "medium",
2227      "supports_attachments": false,
2228      "options": {}
2229    },
2230    {
2231      "id": "openai/gpt-oss-120b:free",
2232      "name": "OpenAI: gpt-oss-120b (free)",
2233      "cost_per_1m_in": 0,
2234      "cost_per_1m_out": 0,
2235      "cost_per_1m_in_cached": 0,
2236      "cost_per_1m_out_cached": 0,
2237      "context_window": 131072,
2238      "default_max_tokens": 13107,
2239      "can_reason": true,
2240      "reasoning_levels": [
2241        "low",
2242        "medium",
2243        "high"
2244      ],
2245      "default_reasoning_effort": "medium",
2246      "supports_attachments": false,
2247      "options": {}
2248    },
2249    {
2250      "id": "openai/gpt-oss-20b",
2251      "name": "OpenAI: gpt-oss-20b",
2252      "cost_per_1m_in": 0.04,
2253      "cost_per_1m_out": 0.15,
2254      "cost_per_1m_in_cached": 0,
2255      "cost_per_1m_out_cached": 0,
2256      "context_window": 131072,
2257      "default_max_tokens": 13107,
2258      "can_reason": true,
2259      "reasoning_levels": [
2260        "low",
2261        "medium",
2262        "high"
2263      ],
2264      "default_reasoning_effort": "medium",
2265      "supports_attachments": false,
2266      "options": {}
2267    },
2268    {
2269      "id": "openai/gpt-oss-20b:free",
2270      "name": "OpenAI: gpt-oss-20b (free)",
2271      "cost_per_1m_in": 0,
2272      "cost_per_1m_out": 0,
2273      "cost_per_1m_in_cached": 0,
2274      "cost_per_1m_out_cached": 0,
2275      "context_window": 131072,
2276      "default_max_tokens": 65536,
2277      "can_reason": true,
2278      "reasoning_levels": [
2279        "low",
2280        "medium",
2281        "high"
2282      ],
2283      "default_reasoning_effort": "medium",
2284      "supports_attachments": false,
2285      "options": {}
2286    },
2287    {
2288      "id": "openai/gpt-oss-safeguard-20b",
2289      "name": "OpenAI: gpt-oss-safeguard-20b",
2290      "cost_per_1m_in": 0.075,
2291      "cost_per_1m_out": 0.3,
2292      "cost_per_1m_in_cached": 0,
2293      "cost_per_1m_out_cached": 0.037,
2294      "context_window": 131072,
2295      "default_max_tokens": 32768,
2296      "can_reason": true,
2297      "reasoning_levels": [
2298        "low",
2299        "medium",
2300        "high"
2301      ],
2302      "default_reasoning_effort": "medium",
2303      "supports_attachments": false,
2304      "options": {}
2305    },
2306    {
2307      "id": "openai/o1",
2308      "name": "OpenAI: o1",
2309      "cost_per_1m_in": 15,
2310      "cost_per_1m_out": 60,
2311      "cost_per_1m_in_cached": 0,
2312      "cost_per_1m_out_cached": 7.5,
2313      "context_window": 200000,
2314      "default_max_tokens": 50000,
2315      "can_reason": false,
2316      "supports_attachments": true,
2317      "options": {}
2318    },
2319    {
2320      "id": "openai/o3",
2321      "name": "OpenAI: o3",
2322      "cost_per_1m_in": 2,
2323      "cost_per_1m_out": 8,
2324      "cost_per_1m_in_cached": 0,
2325      "cost_per_1m_out_cached": 0.5,
2326      "context_window": 200000,
2327      "default_max_tokens": 50000,
2328      "can_reason": true,
2329      "reasoning_levels": [
2330        "low",
2331        "medium",
2332        "high"
2333      ],
2334      "default_reasoning_effort": "medium",
2335      "supports_attachments": true,
2336      "options": {}
2337    },
2338    {
2339      "id": "openai/o3-deep-research",
2340      "name": "OpenAI: o3 Deep Research",
2341      "cost_per_1m_in": 10,
2342      "cost_per_1m_out": 40,
2343      "cost_per_1m_in_cached": 0,
2344      "cost_per_1m_out_cached": 2.5,
2345      "context_window": 200000,
2346      "default_max_tokens": 50000,
2347      "can_reason": true,
2348      "reasoning_levels": [
2349        "low",
2350        "medium",
2351        "high"
2352      ],
2353      "default_reasoning_effort": "medium",
2354      "supports_attachments": true,
2355      "options": {}
2356    },
2357    {
2358      "id": "openai/o3-mini",
2359      "name": "OpenAI: o3 Mini",
2360      "cost_per_1m_in": 1.1,
2361      "cost_per_1m_out": 4.4,
2362      "cost_per_1m_in_cached": 0,
2363      "cost_per_1m_out_cached": 0.55,
2364      "context_window": 200000,
2365      "default_max_tokens": 50000,
2366      "can_reason": false,
2367      "supports_attachments": false,
2368      "options": {}
2369    },
2370    {
2371      "id": "openai/o3-mini-high",
2372      "name": "OpenAI: o3 Mini High",
2373      "cost_per_1m_in": 1.1,
2374      "cost_per_1m_out": 4.4,
2375      "cost_per_1m_in_cached": 0,
2376      "cost_per_1m_out_cached": 0.55,
2377      "context_window": 200000,
2378      "default_max_tokens": 50000,
2379      "can_reason": false,
2380      "supports_attachments": false,
2381      "options": {}
2382    },
2383    {
2384      "id": "openai/o3-pro",
2385      "name": "OpenAI: o3 Pro",
2386      "cost_per_1m_in": 20,
2387      "cost_per_1m_out": 80,
2388      "cost_per_1m_in_cached": 0,
2389      "cost_per_1m_out_cached": 0,
2390      "context_window": 200000,
2391      "default_max_tokens": 50000,
2392      "can_reason": true,
2393      "reasoning_levels": [
2394        "low",
2395        "medium",
2396        "high"
2397      ],
2398      "default_reasoning_effort": "medium",
2399      "supports_attachments": true,
2400      "options": {}
2401    },
2402    {
2403      "id": "openai/o4-mini",
2404      "name": "OpenAI: o4 Mini",
2405      "cost_per_1m_in": 1.1,
2406      "cost_per_1m_out": 4.4,
2407      "cost_per_1m_in_cached": 0,
2408      "cost_per_1m_out_cached": 0.275,
2409      "context_window": 200000,
2410      "default_max_tokens": 50000,
2411      "can_reason": true,
2412      "reasoning_levels": [
2413        "low",
2414        "medium",
2415        "high"
2416      ],
2417      "default_reasoning_effort": "medium",
2418      "supports_attachments": true,
2419      "options": {}
2420    },
2421    {
2422      "id": "openai/o4-mini-deep-research",
2423      "name": "OpenAI: o4 Mini Deep Research",
2424      "cost_per_1m_in": 2,
2425      "cost_per_1m_out": 8,
2426      "cost_per_1m_in_cached": 0,
2427      "cost_per_1m_out_cached": 0.5,
2428      "context_window": 200000,
2429      "default_max_tokens": 50000,
2430      "can_reason": true,
2431      "reasoning_levels": [
2432        "low",
2433        "medium",
2434        "high"
2435      ],
2436      "default_reasoning_effort": "medium",
2437      "supports_attachments": true,
2438      "options": {}
2439    },
2440    {
2441      "id": "openai/o4-mini-high",
2442      "name": "OpenAI: o4 Mini High",
2443      "cost_per_1m_in": 1.1,
2444      "cost_per_1m_out": 4.4,
2445      "cost_per_1m_in_cached": 0,
2446      "cost_per_1m_out_cached": 0.275,
2447      "context_window": 200000,
2448      "default_max_tokens": 50000,
2449      "can_reason": true,
2450      "reasoning_levels": [
2451        "low",
2452        "medium",
2453        "high"
2454      ],
2455      "default_reasoning_effort": "medium",
2456      "supports_attachments": true,
2457      "options": {}
2458    },
2459    {
2460      "id": "prime-intellect/intellect-3",
2461      "name": "Prime Intellect: INTELLECT-3",
2462      "cost_per_1m_in": 0.2,
2463      "cost_per_1m_out": 1.1,
2464      "cost_per_1m_in_cached": 0,
2465      "cost_per_1m_out_cached": 0,
2466      "context_window": 131072,
2467      "default_max_tokens": 65536,
2468      "can_reason": true,
2469      "reasoning_levels": [
2470        "low",
2471        "medium",
2472        "high"
2473      ],
2474      "default_reasoning_effort": "medium",
2475      "supports_attachments": false,
2476      "options": {}
2477    },
2478    {
2479      "id": "qwen/qwen-2.5-72b-instruct",
2480      "name": "Qwen2.5 72B Instruct",
2481      "cost_per_1m_in": 0.13,
2482      "cost_per_1m_out": 0.52,
2483      "cost_per_1m_in_cached": 0,
2484      "cost_per_1m_out_cached": 0,
2485      "context_window": 32768,
2486      "default_max_tokens": 16384,
2487      "can_reason": false,
2488      "supports_attachments": false,
2489      "options": {}
2490    },
2491    {
2492      "id": "qwen/qwen-plus-2025-07-28",
2493      "name": "Qwen: Qwen Plus 0728",
2494      "cost_per_1m_in": 0.4,
2495      "cost_per_1m_out": 1.2,
2496      "cost_per_1m_in_cached": 0,
2497      "cost_per_1m_out_cached": 0,
2498      "context_window": 1000000,
2499      "default_max_tokens": 16384,
2500      "can_reason": false,
2501      "supports_attachments": false,
2502      "options": {}
2503    },
2504    {
2505      "id": "qwen/qwen-plus-2025-07-28:thinking",
2506      "name": "Qwen: Qwen Plus 0728 (thinking)",
2507      "cost_per_1m_in": 0.4,
2508      "cost_per_1m_out": 4,
2509      "cost_per_1m_in_cached": 0,
2510      "cost_per_1m_out_cached": 0,
2511      "context_window": 1000000,
2512      "default_max_tokens": 16384,
2513      "can_reason": true,
2514      "reasoning_levels": [
2515        "low",
2516        "medium",
2517        "high"
2518      ],
2519      "default_reasoning_effort": "medium",
2520      "supports_attachments": false,
2521      "options": {}
2522    },
2523    {
2524      "id": "qwen/qwen-vl-max",
2525      "name": "Qwen: Qwen VL Max",
2526      "cost_per_1m_in": 0.8,
2527      "cost_per_1m_out": 3.2,
2528      "cost_per_1m_in_cached": 0,
2529      "cost_per_1m_out_cached": 0,
2530      "context_window": 131072,
2531      "default_max_tokens": 4096,
2532      "can_reason": false,
2533      "supports_attachments": true,
2534      "options": {}
2535    },
2536    {
2537      "id": "qwen/qwen-max",
2538      "name": "Qwen: Qwen-Max",
2539      "cost_per_1m_in": 1.6,
2540      "cost_per_1m_out": 6.4,
2541      "cost_per_1m_in_cached": 0,
2542      "cost_per_1m_out_cached": 0.64,
2543      "context_window": 32768,
2544      "default_max_tokens": 4096,
2545      "can_reason": false,
2546      "supports_attachments": false,
2547      "options": {}
2548    },
2549    {
2550      "id": "qwen/qwen-plus",
2551      "name": "Qwen: Qwen-Plus",
2552      "cost_per_1m_in": 0.4,
2553      "cost_per_1m_out": 1.2,
2554      "cost_per_1m_in_cached": 0,
2555      "cost_per_1m_out_cached": 0.16,
2556      "context_window": 131072,
2557      "default_max_tokens": 4096,
2558      "can_reason": false,
2559      "supports_attachments": false,
2560      "options": {}
2561    },
2562    {
2563      "id": "qwen/qwen-turbo",
2564      "name": "Qwen: Qwen-Turbo",
2565      "cost_per_1m_in": 0.05,
2566      "cost_per_1m_out": 0.2,
2567      "cost_per_1m_in_cached": 0,
2568      "cost_per_1m_out_cached": 0.02,
2569      "context_window": 1000000,
2570      "default_max_tokens": 4096,
2571      "can_reason": false,
2572      "supports_attachments": false,
2573      "options": {}
2574    },
2575    {
2576      "id": "qwen/qwen3-14b",
2577      "name": "Qwen: Qwen3 14B",
2578      "cost_per_1m_in": 0.05,
2579      "cost_per_1m_out": 0.22,
2580      "cost_per_1m_in_cached": 0,
2581      "cost_per_1m_out_cached": 0,
2582      "context_window": 40960,
2583      "default_max_tokens": 20480,
2584      "can_reason": true,
2585      "reasoning_levels": [
2586        "low",
2587        "medium",
2588        "high"
2589      ],
2590      "default_reasoning_effort": "medium",
2591      "supports_attachments": false,
2592      "options": {}
2593    },
2594    {
2595      "id": "qwen/qwen3-235b-a22b",
2596      "name": "Qwen: Qwen3 235B A22B",
2597      "cost_per_1m_in": 0.22,
2598      "cost_per_1m_out": 0.88,
2599      "cost_per_1m_in_cached": 0,
2600      "cost_per_1m_out_cached": 0,
2601      "context_window": 131072,
2602      "default_max_tokens": 13107,
2603      "can_reason": true,
2604      "reasoning_levels": [
2605        "low",
2606        "medium",
2607        "high"
2608      ],
2609      "default_reasoning_effort": "medium",
2610      "supports_attachments": false,
2611      "options": {}
2612    },
2613    {
2614      "id": "qwen/qwen3-235b-a22b-2507",
2615      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2616      "cost_per_1m_in": 0.22,
2617      "cost_per_1m_out": 0.88,
2618      "cost_per_1m_in_cached": 0,
2619      "cost_per_1m_out_cached": 0,
2620      "context_window": 262144,
2621      "default_max_tokens": 8192,
2622      "can_reason": false,
2623      "supports_attachments": false,
2624      "options": {}
2625    },
2626    {
2627      "id": "qwen/qwen3-235b-a22b-thinking-2507",
2628      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2629      "cost_per_1m_in": 0.11,
2630      "cost_per_1m_out": 0.6,
2631      "cost_per_1m_in_cached": 0,
2632      "cost_per_1m_out_cached": 0,
2633      "context_window": 262144,
2634      "default_max_tokens": 131072,
2635      "can_reason": true,
2636      "reasoning_levels": [
2637        "low",
2638        "medium",
2639        "high"
2640      ],
2641      "default_reasoning_effort": "medium",
2642      "supports_attachments": false,
2643      "options": {}
2644    },
2645    {
2646      "id": "qwen/qwen3-30b-a3b",
2647      "name": "Qwen: Qwen3 30B A3B",
2648      "cost_per_1m_in": 0.09,
2649      "cost_per_1m_out": 0.45,
2650      "cost_per_1m_in_cached": 0,
2651      "cost_per_1m_out_cached": 0,
2652      "context_window": 131072,
2653      "default_max_tokens": 65536,
2654      "can_reason": true,
2655      "reasoning_levels": [
2656        "low",
2657        "medium",
2658        "high"
2659      ],
2660      "default_reasoning_effort": "medium",
2661      "supports_attachments": false,
2662      "options": {}
2663    },
2664    {
2665      "id": "qwen/qwen3-30b-a3b-instruct-2507",
2666      "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2667      "cost_per_1m_in": 0.08,
2668      "cost_per_1m_out": 0.33,
2669      "cost_per_1m_in_cached": 0,
2670      "cost_per_1m_out_cached": 0,
2671      "context_window": 262144,
2672      "default_max_tokens": 131072,
2673      "can_reason": false,
2674      "supports_attachments": false,
2675      "options": {}
2676    },
2677    {
2678      "id": "qwen/qwen3-30b-a3b-thinking-2507",
2679      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2680      "cost_per_1m_in": 0.1,
2681      "cost_per_1m_out": 0.3,
2682      "cost_per_1m_in_cached": 0,
2683      "cost_per_1m_out_cached": 0,
2684      "context_window": 262144,
2685      "default_max_tokens": 26214,
2686      "can_reason": true,
2687      "reasoning_levels": [
2688        "low",
2689        "medium",
2690        "high"
2691      ],
2692      "default_reasoning_effort": "medium",
2693      "supports_attachments": false,
2694      "options": {}
2695    },
2696    {
2697      "id": "qwen/qwen3-32b",
2698      "name": "Qwen: Qwen3 32B",
2699      "cost_per_1m_in": 0.15,
2700      "cost_per_1m_out": 0.5,
2701      "cost_per_1m_in_cached": 0,
2702      "cost_per_1m_out_cached": 0,
2703      "context_window": 131072,
2704      "default_max_tokens": 4000,
2705      "can_reason": true,
2706      "reasoning_levels": [
2707        "low",
2708        "medium",
2709        "high"
2710      ],
2711      "default_reasoning_effort": "medium",
2712      "supports_attachments": false,
2713      "options": {}
2714    },
2715    {
2716      "id": "qwen/qwen3-4b:free",
2717      "name": "Qwen: Qwen3 4B (free)",
2718      "cost_per_1m_in": 0,
2719      "cost_per_1m_out": 0,
2720      "cost_per_1m_in_cached": 0,
2721      "cost_per_1m_out_cached": 0,
2722      "context_window": 40960,
2723      "default_max_tokens": 4096,
2724      "can_reason": true,
2725      "reasoning_levels": [
2726        "low",
2727        "medium",
2728        "high"
2729      ],
2730      "default_reasoning_effort": "medium",
2731      "supports_attachments": false,
2732      "options": {}
2733    },
2734    {
2735      "id": "qwen/qwen3-8b",
2736      "name": "Qwen: Qwen3 8B",
2737      "cost_per_1m_in": 0.2,
2738      "cost_per_1m_out": 0.2,
2739      "cost_per_1m_in_cached": 0,
2740      "cost_per_1m_out_cached": 0,
2741      "context_window": 40960,
2742      "default_max_tokens": 4096,
2743      "can_reason": true,
2744      "reasoning_levels": [
2745        "low",
2746        "medium",
2747        "high"
2748      ],
2749      "default_reasoning_effort": "medium",
2750      "supports_attachments": false,
2751      "options": {}
2752    },
2753    {
2754      "id": "qwen/qwen3-coder-30b-a3b-instruct",
2755      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2756      "cost_per_1m_in": 0.07,
2757      "cost_per_1m_out": 0.28,
2758      "cost_per_1m_in_cached": 0,
2759      "cost_per_1m_out_cached": 0,
2760      "context_window": 262144,
2761      "default_max_tokens": 131072,
2762      "can_reason": false,
2763      "supports_attachments": false,
2764      "options": {}
2765    },
2766    {
2767      "id": "qwen/qwen3-coder",
2768      "name": "Qwen: Qwen3 Coder 480B A35B",
2769      "cost_per_1m_in": 0.4,
2770      "cost_per_1m_out": 1.8,
2771      "cost_per_1m_in_cached": 0,
2772      "cost_per_1m_out_cached": 0,
2773      "context_window": 262144,
2774      "default_max_tokens": 131072,
2775      "can_reason": false,
2776      "supports_attachments": false,
2777      "options": {}
2778    },
2779    {
2780      "id": "qwen/qwen3-coder:exacto",
2781      "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2782      "cost_per_1m_in": 0.22,
2783      "cost_per_1m_out": 1.8,
2784      "cost_per_1m_in_cached": 0,
2785      "cost_per_1m_out_cached": 0,
2786      "context_window": 262144,
2787      "default_max_tokens": 32768,
2788      "can_reason": false,
2789      "supports_attachments": false,
2790      "options": {}
2791    },
2792    {
2793      "id": "qwen/qwen3-coder:free",
2794      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2795      "cost_per_1m_in": 0,
2796      "cost_per_1m_out": 0,
2797      "cost_per_1m_in_cached": 0,
2798      "cost_per_1m_out_cached": 0,
2799      "context_window": 262000,
2800      "default_max_tokens": 131000,
2801      "can_reason": false,
2802      "supports_attachments": false,
2803      "options": {}
2804    },
2805    {
2806      "id": "qwen/qwen3-coder-flash",
2807      "name": "Qwen: Qwen3 Coder Flash",
2808      "cost_per_1m_in": 0.3,
2809      "cost_per_1m_out": 1.5,
2810      "cost_per_1m_in_cached": 0,
2811      "cost_per_1m_out_cached": 0.08,
2812      "context_window": 128000,
2813      "default_max_tokens": 32768,
2814      "can_reason": false,
2815      "supports_attachments": false,
2816      "options": {}
2817    },
2818    {
2819      "id": "qwen/qwen3-coder-plus",
2820      "name": "Qwen: Qwen3 Coder Plus",
2821      "cost_per_1m_in": 1,
2822      "cost_per_1m_out": 5,
2823      "cost_per_1m_in_cached": 0,
2824      "cost_per_1m_out_cached": 0.1,
2825      "context_window": 128000,
2826      "default_max_tokens": 32768,
2827      "can_reason": false,
2828      "supports_attachments": false,
2829      "options": {}
2830    },
2831    {
2832      "id": "qwen/qwen3-max",
2833      "name": "Qwen: Qwen3 Max",
2834      "cost_per_1m_in": 1.2,
2835      "cost_per_1m_out": 6,
2836      "cost_per_1m_in_cached": 0,
2837      "cost_per_1m_out_cached": 0.24,
2838      "context_window": 256000,
2839      "default_max_tokens": 16384,
2840      "can_reason": false,
2841      "supports_attachments": false,
2842      "options": {}
2843    },
2844    {
2845      "id": "qwen/qwen3-next-80b-a3b-instruct",
2846      "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2847      "cost_per_1m_in": 0.09,
2848      "cost_per_1m_out": 1.1,
2849      "cost_per_1m_in_cached": 0,
2850      "cost_per_1m_out_cached": 0,
2851      "context_window": 262144,
2852      "default_max_tokens": 26214,
2853      "can_reason": false,
2854      "supports_attachments": false,
2855      "options": {}
2856    },
2857    {
2858      "id": "qwen/qwen3-next-80b-a3b-thinking",
2859      "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2860      "cost_per_1m_in": 0.3,
2861      "cost_per_1m_out": 0.3,
2862      "cost_per_1m_in_cached": 0,
2863      "cost_per_1m_out_cached": 0,
2864      "context_window": 262144,
2865      "default_max_tokens": 131072,
2866      "can_reason": true,
2867      "reasoning_levels": [
2868        "low",
2869        "medium",
2870        "high"
2871      ],
2872      "default_reasoning_effort": "medium",
2873      "supports_attachments": false,
2874      "options": {}
2875    },
2876    {
2877      "id": "qwen/qwen3-vl-235b-a22b-instruct",
2878      "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2879      "cost_per_1m_in": 0.22,
2880      "cost_per_1m_out": 0.88,
2881      "cost_per_1m_in_cached": 0,
2882      "cost_per_1m_out_cached": 0,
2883      "context_window": 262144,
2884      "default_max_tokens": 26214,
2885      "can_reason": false,
2886      "supports_attachments": true,
2887      "options": {}
2888    },
2889    {
2890      "id": "qwen/qwen3-vl-30b-a3b-instruct",
2891      "name": "Qwen: Qwen3 VL 30B A3B Instruct",
2892      "cost_per_1m_in": 0.15,
2893      "cost_per_1m_out": 0.6,
2894      "cost_per_1m_in_cached": 0,
2895      "cost_per_1m_out_cached": 0,
2896      "context_window": 262144,
2897      "default_max_tokens": 26214,
2898      "can_reason": false,
2899      "supports_attachments": true,
2900      "options": {}
2901    },
2902    {
2903      "id": "qwen/qwen3-vl-30b-a3b-thinking",
2904      "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2905      "cost_per_1m_in": 0.16,
2906      "cost_per_1m_out": 0.8,
2907      "cost_per_1m_in_cached": 0,
2908      "cost_per_1m_out_cached": 0,
2909      "context_window": 131072,
2910      "default_max_tokens": 16384,
2911      "can_reason": true,
2912      "reasoning_levels": [
2913        "low",
2914        "medium",
2915        "high"
2916      ],
2917      "default_reasoning_effort": "medium",
2918      "supports_attachments": true,
2919      "options": {}
2920    },
2921    {
2922      "id": "qwen/qwen3-vl-8b-instruct",
2923      "name": "Qwen: Qwen3 VL 8B Instruct",
2924      "cost_per_1m_in": 0.18,
2925      "cost_per_1m_out": 0.7,
2926      "cost_per_1m_in_cached": 0,
2927      "cost_per_1m_out_cached": 0,
2928      "context_window": 256000,
2929      "default_max_tokens": 16384,
2930      "can_reason": false,
2931      "supports_attachments": true,
2932      "options": {}
2933    },
2934    {
2935      "id": "qwen/qwen3-vl-8b-thinking",
2936      "name": "Qwen: Qwen3 VL 8B Thinking",
2937      "cost_per_1m_in": 0.18,
2938      "cost_per_1m_out": 2.1,
2939      "cost_per_1m_in_cached": 0,
2940      "cost_per_1m_out_cached": 0,
2941      "context_window": 256000,
2942      "default_max_tokens": 16384,
2943      "can_reason": true,
2944      "reasoning_levels": [
2945        "low",
2946        "medium",
2947        "high"
2948      ],
2949      "default_reasoning_effort": "medium",
2950      "supports_attachments": true,
2951      "options": {}
2952    },
2953    {
2954      "id": "relace/relace-search",
2955      "name": "Relace: Relace Search",
2956      "cost_per_1m_in": 1,
2957      "cost_per_1m_out": 3,
2958      "cost_per_1m_in_cached": 0,
2959      "cost_per_1m_out_cached": 0,
2960      "context_window": 256000,
2961      "default_max_tokens": 64000,
2962      "can_reason": false,
2963      "supports_attachments": false,
2964      "options": {}
2965    },
2966    {
2967      "id": "stepfun-ai/step3",
2968      "name": "StepFun: Step3",
2969      "cost_per_1m_in": 0.57,
2970      "cost_per_1m_out": 1.42,
2971      "cost_per_1m_in_cached": 0,
2972      "cost_per_1m_out_cached": 0,
2973      "context_window": 65536,
2974      "default_max_tokens": 32768,
2975      "can_reason": true,
2976      "reasoning_levels": [
2977        "low",
2978        "medium",
2979        "high"
2980      ],
2981      "default_reasoning_effort": "medium",
2982      "supports_attachments": true,
2983      "options": {}
2984    },
2985    {
2986      "id": "tngtech/deepseek-r1t2-chimera",
2987      "name": "TNG: DeepSeek R1T2 Chimera",
2988      "cost_per_1m_in": 0.3,
2989      "cost_per_1m_out": 1.2,
2990      "cost_per_1m_in_cached": 0,
2991      "cost_per_1m_out_cached": 0,
2992      "context_window": 163840,
2993      "default_max_tokens": 81920,
2994      "can_reason": true,
2995      "reasoning_levels": [
2996        "low",
2997        "medium",
2998        "high"
2999      ],
3000      "default_reasoning_effort": "medium",
3001      "supports_attachments": false,
3002      "options": {}
3003    },
3004    {
3005      "id": "tngtech/tng-r1t-chimera",
3006      "name": "TNG: R1T Chimera",
3007      "cost_per_1m_in": 0.3,
3008      "cost_per_1m_out": 1.2,
3009      "cost_per_1m_in_cached": 0,
3010      "cost_per_1m_out_cached": 0,
3011      "context_window": 163840,
3012      "default_max_tokens": 32768,
3013      "can_reason": true,
3014      "reasoning_levels": [
3015        "low",
3016        "medium",
3017        "high"
3018      ],
3019      "default_reasoning_effort": "medium",
3020      "supports_attachments": false,
3021      "options": {}
3022    },
3023    {
3024      "id": "tngtech/tng-r1t-chimera:free",
3025      "name": "TNG: R1T Chimera (free)",
3026      "cost_per_1m_in": 0,
3027      "cost_per_1m_out": 0,
3028      "cost_per_1m_in_cached": 0,
3029      "cost_per_1m_out_cached": 0,
3030      "context_window": 163840,
3031      "default_max_tokens": 81920,
3032      "can_reason": true,
3033      "reasoning_levels": [
3034        "low",
3035        "medium",
3036        "high"
3037      ],
3038      "default_reasoning_effort": "medium",
3039      "supports_attachments": false,
3040      "options": {}
3041    },
3042    {
3043      "id": "thedrummer/rocinante-12b",
3044      "name": "TheDrummer: Rocinante 12B",
3045      "cost_per_1m_in": 0.17,
3046      "cost_per_1m_out": 0.43,
3047      "cost_per_1m_in_cached": 0,
3048      "cost_per_1m_out_cached": 0,
3049      "context_window": 32768,
3050      "default_max_tokens": 3276,
3051      "can_reason": false,
3052      "supports_attachments": false,
3053      "options": {}
3054    },
3055    {
3056      "id": "thedrummer/unslopnemo-12b",
3057      "name": "TheDrummer: UnslopNemo 12B",
3058      "cost_per_1m_in": 0.4,
3059      "cost_per_1m_out": 0.4,
3060      "cost_per_1m_in_cached": 0,
3061      "cost_per_1m_out_cached": 0,
3062      "context_window": 32768,
3063      "default_max_tokens": 3276,
3064      "can_reason": false,
3065      "supports_attachments": false,
3066      "options": {}
3067    },
3068    {
3069      "id": "alibaba/tongyi-deepresearch-30b-a3b",
3070      "name": "Tongyi DeepResearch 30B A3B",
3071      "cost_per_1m_in": 0.09,
3072      "cost_per_1m_out": 0.4,
3073      "cost_per_1m_in_cached": 0,
3074      "cost_per_1m_out_cached": 0,
3075      "context_window": 131072,
3076      "default_max_tokens": 65536,
3077      "can_reason": true,
3078      "reasoning_levels": [
3079        "low",
3080        "medium",
3081        "high"
3082      ],
3083      "default_reasoning_effort": "medium",
3084      "supports_attachments": false,
3085      "options": {}
3086    },
3087    {
3088      "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
3089      "name": "Tongyi DeepResearch 30B A3B (free)",
3090      "cost_per_1m_in": 0,
3091      "cost_per_1m_out": 0,
3092      "cost_per_1m_in_cached": 0,
3093      "cost_per_1m_out_cached": 0,
3094      "context_window": 131072,
3095      "default_max_tokens": 65536,
3096      "can_reason": true,
3097      "reasoning_levels": [
3098        "low",
3099        "medium",
3100        "high"
3101      ],
3102      "default_reasoning_effort": "medium",
3103      "supports_attachments": false,
3104      "options": {}
3105    },
3106    {
3107      "id": "xiaomi/mimo-v2-flash:free",
3108      "name": "Xiaomi: MiMo-V2-Flash (free)",
3109      "cost_per_1m_in": 0,
3110      "cost_per_1m_out": 0,
3111      "cost_per_1m_in_cached": 0,
3112      "cost_per_1m_out_cached": 0,
3113      "context_window": 262144,
3114      "default_max_tokens": 32768,
3115      "can_reason": true,
3116      "reasoning_levels": [
3117        "low",
3118        "medium",
3119        "high"
3120      ],
3121      "default_reasoning_effort": "medium",
3122      "supports_attachments": false,
3123      "options": {}
3124    },
3125    {
3126      "id": "z-ai/glm-4-32b",
3127      "name": "Z.AI: GLM 4 32B",
3128      "cost_per_1m_in": 0.1,
3129      "cost_per_1m_out": 0.1,
3130      "cost_per_1m_in_cached": 0,
3131      "cost_per_1m_out_cached": 0,
3132      "context_window": 128000,
3133      "default_max_tokens": 12800,
3134      "can_reason": false,
3135      "supports_attachments": false,
3136      "options": {}
3137    },
3138    {
3139      "id": "z-ai/glm-4.5",
3140      "name": "Z.AI: GLM 4.5",
3141      "cost_per_1m_in": 0.48,
3142      "cost_per_1m_out": 1.76,
3143      "cost_per_1m_in_cached": 0,
3144      "cost_per_1m_out_cached": 0.088,
3145      "context_window": 131072,
3146      "default_max_tokens": 49152,
3147      "can_reason": true,
3148      "reasoning_levels": [
3149        "low",
3150        "medium",
3151        "high"
3152      ],
3153      "default_reasoning_effort": "medium",
3154      "supports_attachments": false,
3155      "options": {}
3156    },
3157    {
3158      "id": "z-ai/glm-4.5-air",
3159      "name": "Z.AI: GLM 4.5 Air",
3160      "cost_per_1m_in": 0.104,
3161      "cost_per_1m_out": 0.68,
3162      "cost_per_1m_in_cached": 0,
3163      "cost_per_1m_out_cached": 0,
3164      "context_window": 131072,
3165      "default_max_tokens": 49152,
3166      "can_reason": true,
3167      "reasoning_levels": [
3168        "low",
3169        "medium",
3170        "high"
3171      ],
3172      "default_reasoning_effort": "medium",
3173      "supports_attachments": false,
3174      "options": {}
3175    },
3176    {
3177      "id": "z-ai/glm-4.5-air:free",
3178      "name": "Z.AI: GLM 4.5 Air (free)",
3179      "cost_per_1m_in": 0,
3180      "cost_per_1m_out": 0,
3181      "cost_per_1m_in_cached": 0,
3182      "cost_per_1m_out_cached": 0,
3183      "context_window": 131072,
3184      "default_max_tokens": 48000,
3185      "can_reason": true,
3186      "reasoning_levels": [
3187        "low",
3188        "medium",
3189        "high"
3190      ],
3191      "default_reasoning_effort": "medium",
3192      "supports_attachments": false,
3193      "options": {}
3194    },
3195    {
3196      "id": "z-ai/glm-4.5v",
3197      "name": "Z.AI: GLM 4.5V",
3198      "cost_per_1m_in": 0.48,
3199      "cost_per_1m_out": 1.44,
3200      "cost_per_1m_in_cached": 0,
3201      "cost_per_1m_out_cached": 0.088,
3202      "context_window": 65536,
3203      "default_max_tokens": 8192,
3204      "can_reason": true,
3205      "reasoning_levels": [
3206        "low",
3207        "medium",
3208        "high"
3209      ],
3210      "default_reasoning_effort": "medium",
3211      "supports_attachments": true,
3212      "options": {}
3213    },
3214    {
3215      "id": "z-ai/glm-4.6",
3216      "name": "Z.AI: GLM 4.6",
3217      "cost_per_1m_in": 0.39,
3218      "cost_per_1m_out": 1.9,
3219      "cost_per_1m_in_cached": 0,
3220      "cost_per_1m_out_cached": 0,
3221      "context_window": 204800,
3222      "default_max_tokens": 102400,
3223      "can_reason": true,
3224      "reasoning_levels": [
3225        "low",
3226        "medium",
3227        "high"
3228      ],
3229      "default_reasoning_effort": "medium",
3230      "supports_attachments": false,
3231      "options": {}
3232    },
3233    {
3234      "id": "z-ai/glm-4.6:exacto",
3235      "name": "Z.AI: GLM 4.6 (exacto)",
3236      "cost_per_1m_in": 0.44,
3237      "cost_per_1m_out": 1.76,
3238      "cost_per_1m_in_cached": 0,
3239      "cost_per_1m_out_cached": 0,
3240      "context_window": 204800,
3241      "default_max_tokens": 65536,
3242      "can_reason": true,
3243      "reasoning_levels": [
3244        "low",
3245        "medium",
3246        "high"
3247      ],
3248      "default_reasoning_effort": "medium",
3249      "supports_attachments": false,
3250      "options": {}
3251    },
3252    {
3253      "id": "z-ai/glm-4.6v",
3254      "name": "Z.AI: GLM 4.6V",
3255      "cost_per_1m_in": 0.3,
3256      "cost_per_1m_out": 0.9,
3257      "cost_per_1m_in_cached": 0,
3258      "cost_per_1m_out_cached": 0.05,
3259      "context_window": 131072,
3260      "default_max_tokens": 12000,
3261      "can_reason": true,
3262      "reasoning_levels": [
3263        "low",
3264        "medium",
3265        "high"
3266      ],
3267      "default_reasoning_effort": "medium",
3268      "supports_attachments": true,
3269      "options": {}
3270    },
3271    {
3272      "id": "z-ai/glm-4.7",
3273      "name": "Z.AI: GLM 4.7",
3274      "cost_per_1m_in": 0.6,
3275      "cost_per_1m_out": 2.2,
3276      "cost_per_1m_in_cached": 0,
3277      "cost_per_1m_out_cached": 0.11,
3278      "context_window": 200000,
3279      "default_max_tokens": 65536,
3280      "can_reason": true,
3281      "reasoning_levels": [
3282        "low",
3283        "medium",
3284        "high"
3285      ],
3286      "default_reasoning_effort": "medium",
3287      "supports_attachments": false,
3288      "options": {}
3289    },
3290    {
3291      "id": "x-ai/grok-3",
3292      "name": "xAI: Grok 3",
3293      "cost_per_1m_in": 3,
3294      "cost_per_1m_out": 15,
3295      "cost_per_1m_in_cached": 0,
3296      "cost_per_1m_out_cached": 0.75,
3297      "context_window": 131072,
3298      "default_max_tokens": 13107,
3299      "can_reason": false,
3300      "supports_attachments": false,
3301      "options": {}
3302    },
3303    {
3304      "id": "x-ai/grok-3-beta",
3305      "name": "xAI: Grok 3 Beta",
3306      "cost_per_1m_in": 3,
3307      "cost_per_1m_out": 15,
3308      "cost_per_1m_in_cached": 0,
3309      "cost_per_1m_out_cached": 0.75,
3310      "context_window": 131072,
3311      "default_max_tokens": 13107,
3312      "can_reason": false,
3313      "supports_attachments": false,
3314      "options": {}
3315    },
3316    {
3317      "id": "x-ai/grok-3-mini",
3318      "name": "xAI: Grok 3 Mini",
3319      "cost_per_1m_in": 0.6,
3320      "cost_per_1m_out": 4,
3321      "cost_per_1m_in_cached": 0,
3322      "cost_per_1m_out_cached": 0.15,
3323      "context_window": 131072,
3324      "default_max_tokens": 13107,
3325      "can_reason": true,
3326      "reasoning_levels": [
3327        "low",
3328        "medium",
3329        "high"
3330      ],
3331      "default_reasoning_effort": "medium",
3332      "supports_attachments": false,
3333      "options": {}
3334    },
3335    {
3336      "id": "x-ai/grok-3-mini-beta",
3337      "name": "xAI: Grok 3 Mini Beta",
3338      "cost_per_1m_in": 0.6,
3339      "cost_per_1m_out": 4,
3340      "cost_per_1m_in_cached": 0,
3341      "cost_per_1m_out_cached": 0.15,
3342      "context_window": 131072,
3343      "default_max_tokens": 13107,
3344      "can_reason": true,
3345      "reasoning_levels": [
3346        "low",
3347        "medium",
3348        "high"
3349      ],
3350      "default_reasoning_effort": "medium",
3351      "supports_attachments": false,
3352      "options": {}
3353    },
3354    {
3355      "id": "x-ai/grok-4",
3356      "name": "xAI: Grok 4",
3357      "cost_per_1m_in": 3,
3358      "cost_per_1m_out": 15,
3359      "cost_per_1m_in_cached": 0,
3360      "cost_per_1m_out_cached": 0.75,
3361      "context_window": 256000,
3362      "default_max_tokens": 25600,
3363      "can_reason": true,
3364      "reasoning_levels": [
3365        "low",
3366        "medium",
3367        "high"
3368      ],
3369      "default_reasoning_effort": "medium",
3370      "supports_attachments": true,
3371      "options": {}
3372    },
3373    {
3374      "id": "x-ai/grok-4-fast",
3375      "name": "xAI: Grok 4 Fast",
3376      "cost_per_1m_in": 0.2,
3377      "cost_per_1m_out": 0.5,
3378      "cost_per_1m_in_cached": 0,
3379      "cost_per_1m_out_cached": 0.05,
3380      "context_window": 2000000,
3381      "default_max_tokens": 15000,
3382      "can_reason": true,
3383      "reasoning_levels": [
3384        "low",
3385        "medium",
3386        "high"
3387      ],
3388      "default_reasoning_effort": "medium",
3389      "supports_attachments": true,
3390      "options": {}
3391    },
3392    {
3393      "id": "x-ai/grok-4.1-fast",
3394      "name": "xAI: Grok 4.1 Fast",
3395      "cost_per_1m_in": 0.2,
3396      "cost_per_1m_out": 0.5,
3397      "cost_per_1m_in_cached": 0,
3398      "cost_per_1m_out_cached": 0.05,
3399      "context_window": 2000000,
3400      "default_max_tokens": 15000,
3401      "can_reason": true,
3402      "reasoning_levels": [
3403        "low",
3404        "medium",
3405        "high"
3406      ],
3407      "default_reasoning_effort": "medium",
3408      "supports_attachments": true,
3409      "options": {}
3410    },
3411    {
3412      "id": "x-ai/grok-code-fast-1",
3413      "name": "xAI: Grok Code Fast 1",
3414      "cost_per_1m_in": 0.2,
3415      "cost_per_1m_out": 1.5,
3416      "cost_per_1m_in_cached": 0,
3417      "cost_per_1m_out_cached": 0.02,
3418      "context_window": 256000,
3419      "default_max_tokens": 5000,
3420      "can_reason": true,
3421      "reasoning_levels": [
3422        "low",
3423        "medium",
3424        "high"
3425      ],
3426      "default_reasoning_effort": "medium",
3427      "supports_attachments": false,
3428      "options": {}
3429    }
3430  ],
3431  "default_headers": {
3432    "HTTP-Referer": "https://charm.land",
3433    "X-Title": "Crush"
3434  }
3435}