cortecs.json

   1{
   2  "name": "Cortecs",
   3  "id": "cortecs",
   4  "api_key": "$CORTECS_API_KEY",
   5  "api_endpoint": "https://api.cortecs.ai/v1",
   6  "type": "openai",
   7  "default_large_model_id": "qwen3-coder-30b-a3b-instruct",
   8  "default_small_model_id": "glm-4.7-flash",
   9  "models": [
  10    {
  11      "id": "glm-5",
  12      "name": "GLM 5",
  13      "cost_per_1m_in": 0.932,
  14      "cost_per_1m_out": 2.982,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 202752,
  18      "default_max_tokens": 20275,
  19      "can_reason": true,
  20      "reasoning_levels": [
  21        "low",
  22        "medium",
  23        "high"
  24      ],
  25      "default_reasoning_effort": "medium",
  26      "supports_attachments": false
  27    },
  28    {
  29      "id": "glm-4.6",
  30      "name": "GLM 4.6",
  31      "cost_per_1m_in": 0.373,
  32      "cost_per_1m_out": 1.631,
  33      "cost_per_1m_in_cached": 0,
  34      "cost_per_1m_out_cached": 0,
  35      "context_window": 203000,
  36      "default_max_tokens": 20300,
  37      "can_reason": true,
  38      "reasoning_levels": [
  39        "low",
  40        "medium",
  41        "high"
  42      ],
  43      "default_reasoning_effort": "medium",
  44      "supports_attachments": false
  45    },
  46    {
  47      "id": "deepseek-chat-v3.1",
  48      "name": "DeepSeek Chat V3.1",
  49      "cost_per_1m_in": 0.186,
  50      "cost_per_1m_out": 0.745,
  51      "cost_per_1m_in_cached": 0,
  52      "cost_per_1m_out_cached": 0,
  53      "context_window": 164000,
  54      "default_max_tokens": 16400,
  55      "can_reason": true,
  56      "reasoning_levels": [
  57        "low",
  58        "medium",
  59        "high"
  60      ],
  61      "default_reasoning_effort": "medium",
  62      "supports_attachments": false
  63    },
  64    {
  65      "id": "qwen-2.5-72b-instruct",
  66      "name": "Qwen2.5 72B Instruct",
  67      "cost_per_1m_in": 0.065,
  68      "cost_per_1m_out": 0.242,
  69      "cost_per_1m_in_cached": 0,
  70      "cost_per_1m_out_cached": 0,
  71      "context_window": 33000,
  72      "default_max_tokens": 3300,
  73      "can_reason": false,
  74      "supports_attachments": false
  75    },
  76    {
  77      "id": "qwen3.5-397b-a17b",
   78      "name": "Qwen3.5 397B A17B",
  79      "cost_per_1m_in": 0.559,
  80      "cost_per_1m_out": 3.354,
  81      "cost_per_1m_in_cached": 0,
  82      "cost_per_1m_out_cached": 0,
  83      "context_window": 262000,
  84      "default_max_tokens": 25000,
  85      "can_reason": true,
  86      "reasoning_levels": [
  87        "low",
  88        "medium",
  89        "high"
  90      ],
  91      "default_reasoning_effort": "medium",
  92      "supports_attachments": false
  93    },
  94    {
  95      "id": "deepseek-v3.2",
  96      "name": "DeepSeek V3.2",
  97      "cost_per_1m_in": 0.28,
  98      "cost_per_1m_out": 0.466,
  99      "cost_per_1m_in_cached": 0,
 100      "cost_per_1m_out_cached": 0,
 101      "context_window": 163840,
 102      "default_max_tokens": 16384,
 103      "can_reason": true,
 104      "reasoning_levels": [
 105        "low",
 106        "medium",
 107        "high"
 108      ],
 109      "default_reasoning_effort": "medium",
 110      "supports_attachments": false
 111    },
 112    {
 113      "id": "mistral-small-2603",
 114      "name": "Mistral Small 4 2603",
 115      "cost_per_1m_in": 0.134,
 116      "cost_per_1m_out": 0.536,
 117      "cost_per_1m_in_cached": 0,
 118      "cost_per_1m_out_cached": 0,
 119      "context_window": 256000,
 120      "default_max_tokens": 25600,
 121      "can_reason": true,
 122      "reasoning_levels": [
 123        "low",
 124        "medium",
 125        "high"
 126      ],
 127      "default_reasoning_effort": "medium",
 128      "supports_attachments": true
 129    },
 130    {
 131      "id": "minimax-m2.5",
 132      "name": "MiniMax M2.5",
 133      "cost_per_1m_in": 0.28,
 134      "cost_per_1m_out": 1.025,
 135      "cost_per_1m_in_cached": 0,
 136      "cost_per_1m_out_cached": 0,
 137      "context_window": 196608,
 138      "default_max_tokens": 6553,
 139      "can_reason": true,
 140      "reasoning_levels": [
 141        "low",
 142        "medium",
 143        "high"
 144      ],
 145      "default_reasoning_effort": "medium",
 146      "supports_attachments": false
 147    },
 148    {
 149      "id": "claude-4-6-sonnet",
 150      "name": "Claude Sonnet 4.6",
 151      "cost_per_1m_in": 3.099,
 152      "cost_per_1m_out": 15.495,
 153      "cost_per_1m_in_cached": 0,
 154      "cost_per_1m_out_cached": 0,
 155      "context_window": 1000000,
 156      "default_max_tokens": 100000,
 157      "can_reason": true,
 158      "reasoning_levels": [
 159        "low",
 160        "medium",
 161        "high"
 162      ],
 163      "default_reasoning_effort": "medium",
 164      "supports_attachments": true
 165    },
 166    {
 167      "id": "glm-4.7-flash",
 168      "name": "GLM 4.7 Flash",
 169      "cost_per_1m_in": 0.075,
 170      "cost_per_1m_out": 0.451,
 171      "cost_per_1m_in_cached": 0,
 172      "cost_per_1m_out_cached": 0,
 173      "context_window": 203000,
 174      "default_max_tokens": 20300,
 175      "can_reason": false,
 176      "supports_attachments": false
 177    },
 178    {
 179      "id": "kimi-k2.5",
 180      "name": "Kimi K2.5",
 181      "cost_per_1m_in": 0.466,
 182      "cost_per_1m_out": 2.236,
 183      "cost_per_1m_in_cached": 0,
 184      "cost_per_1m_out_cached": 0,
 185      "context_window": 256000,
 186      "default_max_tokens": 25600,
 187      "can_reason": true,
 188      "reasoning_levels": [
 189        "low",
 190        "medium",
 191        "high"
 192      ],
 193      "default_reasoning_effort": "medium",
 194      "supports_attachments": false
 195    },
 196    {
 197      "id": "claude-opus4-6",
 198      "name": "Claude Opus 4.6",
 199      "cost_per_1m_in": 5.165,
 200      "cost_per_1m_out": 25.826,
 201      "cost_per_1m_in_cached": 0,
 202      "cost_per_1m_out_cached": 0,
 203      "context_window": 1000000,
 204      "default_max_tokens": 100000,
 205      "can_reason": true,
 206      "reasoning_levels": [
 207        "low",
 208        "medium",
 209        "high"
 210      ],
 211      "default_reasoning_effort": "medium",
 212      "supports_attachments": true
 213    },
 214    {
 215      "id": "minimax-m2",
 216      "name": "MiniMax M2",
 217      "cost_per_1m_in": 0.233,
 218      "cost_per_1m_out": 0.932,
 219      "cost_per_1m_in_cached": 0,
 220      "cost_per_1m_out_cached": 0,
 221      "context_window": 196608,
 222      "default_max_tokens": 19660,
 223      "can_reason": true,
 224      "reasoning_levels": [
 225        "low",
 226        "medium",
 227        "high"
 228      ],
 229      "default_reasoning_effort": "medium",
 230      "supports_attachments": false
 231    },
 232    {
 233      "id": "glm-4.7",
 234      "name": "GLM 4.7",
 235      "cost_per_1m_in": 0.376,
 236      "cost_per_1m_out": 1.878,
 237      "cost_per_1m_in_cached": 0,
 238      "cost_per_1m_out_cached": 0,
 239      "context_window": 200000,
 240      "default_max_tokens": 20000,
 241      "can_reason": true,
 242      "reasoning_levels": [
 243        "low",
 244        "medium",
 245        "high"
 246      ],
 247      "default_reasoning_effort": "medium",
 248      "supports_attachments": false
 249    },
 250    {
 251      "id": "minimax-m2.1",
 252      "name": "MiniMax M2.1",
 253      "cost_per_1m_in": 0.282,
 254      "cost_per_1m_out": 1.127,
 255      "cost_per_1m_in_cached": 0,
 256      "cost_per_1m_out_cached": 0,
 257      "context_window": 196000,
 258      "default_max_tokens": 19600,
 259      "can_reason": true,
 260      "reasoning_levels": [
 261        "low",
 262        "medium",
 263        "high"
 264      ],
 265      "default_reasoning_effort": "medium",
 266      "supports_attachments": false
 267    },
 268    {
 269      "id": "llama-guard-3-8b",
 270      "name": "Llama Guard 3 8B",
 271      "cost_per_1m_in": 0.019,
 272      "cost_per_1m_out": 0.056,
 273      "cost_per_1m_in_cached": 0,
 274      "cost_per_1m_out_cached": 0,
 275      "context_window": 128000,
 276      "default_max_tokens": 12800,
 277      "can_reason": false,
 278      "supports_attachments": false
 279    },
 280    {
 281      "id": "qwen3-vl-235b-a22b",
 282      "name": "Qwen3 VL 235B A22B",
 283      "cost_per_1m_in": 0.196,
 284      "cost_per_1m_out": 1.77,
 285      "cost_per_1m_in_cached": 0,
 286      "cost_per_1m_out_cached": 0,
 287      "context_window": 131000,
 288      "default_max_tokens": 13100,
 289      "can_reason": true,
 290      "reasoning_levels": [
 291        "low",
 292        "medium",
 293        "high"
 294      ],
 295      "default_reasoning_effort": "medium",
 296      "supports_attachments": true
 297    },
 298    {
 299      "id": "mistral-small-creative",
 300      "name": "Mistral Small Creative",
 301      "cost_per_1m_in": 0.105,
 302      "cost_per_1m_out": 0.315,
 303      "cost_per_1m_in_cached": 0,
 304      "cost_per_1m_out_cached": 0,
 305      "context_window": 32000,
 306      "default_max_tokens": 3200,
 307      "can_reason": false,
 308      "supports_attachments": false
 309    },
 310    {
 311      "id": "nvidia-nemotron-3-nano-30b-a3b",
 312      "name": "Nemotron 3 Nano 30B A3B",
 313      "cost_per_1m_in": 0.056,
 314      "cost_per_1m_out": 0.226,
 315      "cost_per_1m_in_cached": 0,
 316      "cost_per_1m_out_cached": 0,
 317      "context_window": 128000,
 318      "default_max_tokens": 12800,
 319      "can_reason": true,
 320      "reasoning_levels": [
 321        "low",
 322        "medium",
 323        "high"
 324      ],
 325      "default_reasoning_effort": "medium",
 326      "supports_attachments": false
 327    },
 328    {
 329      "id": "claude-opus4-5",
 330      "name": "Claude Opus 4.5",
 331      "cost_per_1m_in": 5.165,
 332      "cost_per_1m_out": 25.826,
 333      "cost_per_1m_in_cached": 0,
 334      "cost_per_1m_out_cached": 0,
 335      "context_window": 200000,
 336      "default_max_tokens": 20000,
 337      "can_reason": true,
 338      "reasoning_levels": [
 339        "low",
 340        "medium",
 341        "high"
 342      ],
 343      "default_reasoning_effort": "medium",
 344      "supports_attachments": true
 345    },
 346    {
 347      "id": "qwen3-next-80b-a3b-thinking",
 348      "name": "Qwen3 Next 80B A3B Thinking",
 349      "cost_per_1m_in": 0.14,
 350      "cost_per_1m_out": 1.118,
 351      "cost_per_1m_in_cached": 0,
 352      "cost_per_1m_out_cached": 0,
 353      "context_window": 262000,
 354      "default_max_tokens": 12800,
 355      "can_reason": true,
 356      "reasoning_levels": [
 357        "low",
 358        "medium",
 359        "high"
 360      ],
 361      "default_reasoning_effort": "medium",
 362      "supports_attachments": false
 363    },
 364    {
 365      "id": "holo2-30b-a3b",
 366      "name": "Holo2 30B A3B",
 367      "cost_per_1m_in": 0.315,
 368      "cost_per_1m_out": 0.735,
 369      "cost_per_1m_in_cached": 0,
 370      "cost_per_1m_out_cached": 0,
 371      "context_window": 22000,
 372      "default_max_tokens": 2200,
 373      "can_reason": true,
 374      "reasoning_levels": [
 375        "low",
 376        "medium",
 377        "high"
 378      ],
 379      "default_reasoning_effort": "medium",
 380      "supports_attachments": true
 381    },
 382    {
 383      "id": "devstral-2512",
 384      "name": "Devstral 2 2512",
 385      "cost_per_1m_in": 0.42,
 386      "cost_per_1m_out": 2.1,
 387      "cost_per_1m_in_cached": 0,
 388      "cost_per_1m_out_cached": 0,
 389      "context_window": 262000,
 390      "default_max_tokens": 20000,
 391      "can_reason": false,
 392      "supports_attachments": false
 393    },
 394    {
 395      "id": "nova-2-lite",
 396      "name": "Nova 2 Lite",
 397      "cost_per_1m_in": 0.352,
 398      "cost_per_1m_out": 2.963,
 399      "cost_per_1m_in_cached": 0,
 400      "cost_per_1m_out_cached": 0,
 401      "context_window": 1000000,
 402      "default_max_tokens": 100000,
 403      "can_reason": true,
 404      "reasoning_levels": [
 405        "low",
 406        "medium",
 407        "high"
 408      ],
 409      "default_reasoning_effort": "medium",
 410      "supports_attachments": true
 411    },
 412    {
 413      "id": "gpt-oss-safeguard-120b",
 414      "name": "GPT OSS Safeguard 120B",
 415      "cost_per_1m_in": 0.169,
 416      "cost_per_1m_out": 0.657,
 417      "cost_per_1m_in_cached": 0,
 418      "cost_per_1m_out_cached": 0,
 419      "context_window": 128000,
 420      "default_max_tokens": 12800,
 421      "can_reason": true,
 422      "reasoning_levels": [
 423        "low",
 424        "medium",
 425        "high"
 426      ],
 427      "default_reasoning_effort": "medium",
 428      "supports_attachments": false
 429    },
 430    {
 431      "id": "mistral-large-2512",
 432      "name": "Mistral Large 3 2512",
 433      "cost_per_1m_in": 0.525,
 434      "cost_per_1m_out": 1.575,
 435      "cost_per_1m_in_cached": 0,
 436      "cost_per_1m_out_cached": 0,
 437      "context_window": 256000,
 438      "default_max_tokens": 25600,
 439      "can_reason": false,
 440      "supports_attachments": true
 441    },
 442    {
 443      "id": "ministral-8b-2512",
 444      "name": "Ministral 3 8b 2512",
 445      "cost_per_1m_in": 0.158,
 446      "cost_per_1m_out": 0.158,
 447      "cost_per_1m_in_cached": 0,
 448      "cost_per_1m_out_cached": 0,
 449      "context_window": 256000,
 450      "default_max_tokens": 25600,
 451      "can_reason": false,
 452      "supports_attachments": true
 453    },
 454    {
 455      "id": "ministral-3b-2512",
 456      "name": "Ministral 3 3b 2512",
 457      "cost_per_1m_in": 0.105,
 458      "cost_per_1m_out": 0.105,
 459      "cost_per_1m_in_cached": 0,
 460      "cost_per_1m_out_cached": 0,
 461      "context_window": 256000,
 462      "default_max_tokens": 25600,
 463      "can_reason": false,
 464      "supports_attachments": true
 465    },
 466    {
 467      "id": "ministral-14b-2512",
 468      "name": "Ministral 3 14b 2512",
 469      "cost_per_1m_in": 0.21,
 470      "cost_per_1m_out": 0.21,
 471      "cost_per_1m_in_cached": 0,
 472      "cost_per_1m_out_cached": 0,
 473      "context_window": 256000,
 474      "default_max_tokens": 25600,
 475      "can_reason": false,
 476      "supports_attachments": true
 477    },
 478    {
 479      "id": "kimi-k2-thinking",
 480      "name": "Kimi K2 Thinking",
 481      "cost_per_1m_in": 0.564,
 482      "cost_per_1m_out": 2.348,
 483      "cost_per_1m_in_cached": 0,
 484      "cost_per_1m_out_cached": 0,
 485      "context_window": 262000,
 486      "default_max_tokens": 26200,
 487      "can_reason": true,
 488      "reasoning_levels": [
 489        "low",
 490        "medium",
 491        "high"
 492      ],
 493      "default_reasoning_effort": "medium",
 494      "supports_attachments": false
 495    },
 496    {
 497      "id": "intellect-3",
 498      "name": "INTELLECT-3",
 499      "cost_per_1m_in": 0.188,
 500      "cost_per_1m_out": 1.033,
 501      "cost_per_1m_in_cached": 0,
 502      "cost_per_1m_out_cached": 0,
 503      "context_window": 128000,
 504      "default_max_tokens": 12800,
 505      "can_reason": true,
 506      "reasoning_levels": [
 507        "low",
 508        "medium",
 509        "high"
 510      ],
 511      "default_reasoning_effort": "medium",
 512      "supports_attachments": false
 513    },
 514    {
 515      "id": "gpt-5.1",
 516      "name": "GPT 5.1",
 517      "cost_per_1m_in": 1.296,
 518      "cost_per_1m_out": 10.33,
 519      "cost_per_1m_in_cached": 0,
 520      "cost_per_1m_out_cached": 0,
 521      "context_window": 400000,
 522      "default_max_tokens": 40000,
 523      "can_reason": true,
 524      "reasoning_levels": [
 525        "low",
 526        "medium",
 527        "high"
 528      ],
 529      "default_reasoning_effort": "medium",
 530      "supports_attachments": true
 531    },
 532    {
 533      "id": "nemotron-nano-v2-12b",
 534      "name": "Nemotron Nano V2 12b",
 535      "cost_per_1m_in": 0.066,
 536      "cost_per_1m_out": 0.188,
 537      "cost_per_1m_in_cached": 0,
 538      "cost_per_1m_out_cached": 0,
 539      "context_window": 128000,
 540      "default_max_tokens": 12800,
 541      "can_reason": true,
 542      "reasoning_levels": [
 543        "low",
 544        "medium",
 545        "high"
 546      ],
 547      "default_reasoning_effort": "medium",
 548      "supports_attachments": true
 549    },
 550    {
 551      "id": "claude-haiku-4-5",
 552      "name": "Claude Haiku 4.5",
 553      "cost_per_1m_in": 0.939,
 554      "cost_per_1m_out": 4.696,
 555      "cost_per_1m_in_cached": 0,
 556      "cost_per_1m_out_cached": 0,
 557      "context_window": 200000,
 558      "default_max_tokens": 20000,
 559      "can_reason": true,
 560      "reasoning_levels": [
 561        "low",
 562        "medium",
 563        "high"
 564      ],
 565      "default_reasoning_effort": "medium",
 566      "supports_attachments": true
 567    },
 568    {
 569      "id": "claude-4-5-sonnet",
 570      "name": "Claude 4.5 Sonnet",
 571      "cost_per_1m_in": 2.817,
 572      "cost_per_1m_out": 14.087,
 573      "cost_per_1m_in_cached": 0,
 574      "cost_per_1m_out_cached": 0,
 575      "context_window": 200000,
 576      "default_max_tokens": 20000,
 577      "can_reason": true,
 578      "reasoning_levels": [
 579        "low",
 580        "medium",
 581        "high"
 582      ],
 583      "default_reasoning_effort": "medium",
 584      "supports_attachments": true
 585    },
 586    {
 587      "id": "magistral-medium-2509",
 588      "name": "Magistral Medium 2509",
 589      "cost_per_1m_in": 2.1,
 590      "cost_per_1m_out": 5.25,
 591      "cost_per_1m_in_cached": 0,
 592      "cost_per_1m_out_cached": 0,
 593      "context_window": 128000,
 594      "default_max_tokens": 12800,
 595      "can_reason": true,
 596      "reasoning_levels": [
 597        "low",
 598        "medium",
 599        "high"
 600      ],
 601      "default_reasoning_effort": "medium",
 602      "supports_attachments": true
 603    },
 604    {
 605      "id": "magistral-small-2509",
 606      "name": "Magistral Small 2509",
 607      "cost_per_1m_in": 0.525,
 608      "cost_per_1m_out": 1.575,
 609      "cost_per_1m_in_cached": 0,
 610      "cost_per_1m_out_cached": 0,
 611      "context_window": 128000,
 612      "default_max_tokens": 12800,
 613      "can_reason": true,
 614      "reasoning_levels": [
 615        "low",
 616        "medium",
 617        "high"
 618      ],
 619      "default_reasoning_effort": "medium",
 620      "supports_attachments": true
 621    },
 622    {
 623      "id": "hermes-4-70b",
 624      "name": "Hermes 4 70B",
 625      "cost_per_1m_in": 0.122,
 626      "cost_per_1m_out": 0.376,
 627      "cost_per_1m_in_cached": 0,
 628      "cost_per_1m_out_cached": 0,
 629      "context_window": 128000,
 630      "default_max_tokens": 12800,
 631      "can_reason": false,
 632      "supports_attachments": false
 633    },
 634    {
 635      "id": "gpt-5",
 636      "name": "GPT 5",
 637      "cost_per_1m_in": 1.296,
 638      "cost_per_1m_out": 10.33,
 639      "cost_per_1m_in_cached": 0,
 640      "cost_per_1m_out_cached": 0,
 641      "context_window": 400000,
 642      "default_max_tokens": 40000,
 643      "can_reason": true,
 644      "reasoning_levels": [
 645        "low",
 646        "medium",
 647        "high"
 648      ],
 649      "default_reasoning_effort": "medium",
 650      "supports_attachments": true
 651    },
 652    {
 653      "id": "qwen3-235b-a22b-thinking-2507",
 654      "name": "Qwen3 235B A22B Thinking 2507",
 655      "cost_per_1m_in": 0.188,
 656      "cost_per_1m_out": 0.752,
 657      "cost_per_1m_in_cached": 0,
 658      "cost_per_1m_out_cached": 0,
 659      "context_window": 262000,
 660      "default_max_tokens": 26200,
 661      "can_reason": true,
 662      "reasoning_levels": [
 663        "low",
 664        "medium",
 665        "high"
 666      ],
 667      "default_reasoning_effort": "medium",
 668      "supports_attachments": false
 669    },
 670    {
 671      "id": "gpt-oss-120b",
 672      "name": "GPT Oss 120b",
 673      "cost_per_1m_in": 0.037,
 674      "cost_per_1m_out": 0.186,
 675      "cost_per_1m_in_cached": 0,
 676      "cost_per_1m_out_cached": 0,
 677      "context_window": 131000,
 678      "default_max_tokens": 12800,
 679      "can_reason": true,
 680      "reasoning_levels": [
 681        "low",
 682        "medium",
 683        "high"
 684      ],
 685      "default_reasoning_effort": "medium",
 686      "supports_attachments": false
 687    },
 688    {
 689      "id": "qwen3-30b-a3b-instruct-2507",
 690      "name": "Qwen3 30B A3B Instruct 2507",
 691      "cost_per_1m_in": 0.093,
 692      "cost_per_1m_out": 0.281,
 693      "cost_per_1m_in_cached": 0,
 694      "cost_per_1m_out_cached": 0,
 695      "context_window": 262000,
 696      "default_max_tokens": 26200,
 697      "can_reason": true,
 698      "reasoning_levels": [
 699        "low",
 700        "medium",
 701        "high"
 702      ],
 703      "default_reasoning_effort": "medium",
 704      "supports_attachments": false
 705    },
 706    {
 707      "id": "qwen3-30b-a3b-thinking-2507",
 708      "name": "Qwen3 30B A3B Thinking 2507",
 709      "cost_per_1m_in": 0.093,
 710      "cost_per_1m_out": 0.281,
 711      "cost_per_1m_in_cached": 0,
 712      "cost_per_1m_out_cached": 0,
 713      "context_window": 262000,
 714      "default_max_tokens": 26200,
 715      "can_reason": true,
 716      "reasoning_levels": [
 717        "low",
 718        "medium",
 719        "high"
 720      ],
 721      "default_reasoning_effort": "medium",
 722      "supports_attachments": false
 723    },
 724    {
 725      "id": "qwen3-coder-480b-a35b-instruct",
 726      "name": "Qwen3 Coder 480B A35B Instruct",
 727      "cost_per_1m_in": 0.376,
 728      "cost_per_1m_out": 1.691,
 729      "cost_per_1m_in_cached": 0,
 730      "cost_per_1m_out_cached": 0,
 731      "context_window": 262000,
 732      "default_max_tokens": 26200,
 733      "can_reason": true,
 734      "reasoning_levels": [
 735        "low",
 736        "medium",
 737        "high"
 738      ],
 739      "default_reasoning_effort": "medium",
 740      "supports_attachments": false
 741    },
 742    {
 743      "id": "gpt-oss-20b",
 744      "name": "GPT Oss 20b",
 745      "cost_per_1m_in": 0.028,
 746      "cost_per_1m_out": 0.13,
 747      "cost_per_1m_in_cached": 0,
 748      "cost_per_1m_out_cached": 0,
 749      "context_window": 131000,
 750      "default_max_tokens": 12800,
 751      "can_reason": true,
 752      "reasoning_levels": [
 753        "low",
 754        "medium",
 755        "high"
 756      ],
 757      "default_reasoning_effort": "medium",
 758      "supports_attachments": false
 759    },
 760    {
 761      "id": "kimi-k2-instruct",
 762      "name": "Kimi K2 Instruct",
 763      "cost_per_1m_in": 0.469,
 764      "cost_per_1m_out": 2.254,
 765      "cost_per_1m_in_cached": 0,
 766      "cost_per_1m_out_cached": 0,
 767      "context_window": 131000,
 768      "default_max_tokens": 13100,
 769      "can_reason": false,
 770      "supports_attachments": false
 771    },
 772    {
 773      "id": "glm-4.5",
 774      "name": "GLM 4.5",
 775      "cost_per_1m_in": 0.564,
 776      "cost_per_1m_out": 2.066,
 777      "cost_per_1m_in_cached": 0,
 778      "cost_per_1m_out_cached": 0,
 779      "context_window": 128000,
 780      "default_max_tokens": 12800,
 781      "can_reason": true,
 782      "reasoning_levels": [
 783        "low",
 784        "medium",
 785        "high"
 786      ],
 787      "default_reasoning_effort": "medium",
 788      "supports_attachments": false
 789    },
 790    {
 791      "id": "glm-4.5-air",
 792      "name": "GLM 4.5 Air",
 793      "cost_per_1m_in": 0.188,
 794      "cost_per_1m_out": 1.127,
 795      "cost_per_1m_in_cached": 0,
 796      "cost_per_1m_out_cached": 0,
 797      "context_window": 128000,
 798      "default_max_tokens": 12800,
 799      "can_reason": true,
 800      "reasoning_levels": [
 801        "low",
 802        "medium",
 803        "high"
 804      ],
 805      "default_reasoning_effort": "medium",
 806      "supports_attachments": false
 807    },
 808    {
 809      "id": "mistral-7b-instruct-v0.3",
 810      "name": "Mistral 7B Instruct v0.3",
 811      "cost_per_1m_in": 0.105,
 812      "cost_per_1m_out": 0.105,
 813      "cost_per_1m_in_cached": 0,
 814      "cost_per_1m_out_cached": 0,
 815      "context_window": 127000,
 816      "default_max_tokens": 12700,
 817      "can_reason": false,
 818      "supports_attachments": false
 819    },
 820    {
 821      "id": "mistral-large-2402",
 822      "name": "Mistral Large 2402",
 823      "cost_per_1m_in": 4.038,
 824      "cost_per_1m_out": 12.208,
 825      "cost_per_1m_in_cached": 0,
 826      "cost_per_1m_out_cached": 0,
 827      "context_window": 32000,
 828      "default_max_tokens": 3200,
 829      "can_reason": true,
 830      "reasoning_levels": [
 831        "low",
 832        "medium",
 833        "high"
 834      ],
 835      "default_reasoning_effort": "medium",
 836      "supports_attachments": false
 837    },
 838    {
 839      "id": "pixtral-large-2502",
 840      "name": "Pixtral Large 25.02",
 841      "cost_per_1m_in": 1.878,
 842      "cost_per_1m_out": 5.634,
 843      "cost_per_1m_in_cached": 0,
 844      "cost_per_1m_out_cached": 0,
 845      "context_window": 128000,
 846      "default_max_tokens": 12800,
 847      "can_reason": true,
 848      "reasoning_levels": [
 849        "low",
 850        "medium",
 851        "high"
 852      ],
 853      "default_reasoning_effort": "medium",
 854      "supports_attachments": true
 855    },
 856    {
 857      "id": "mistral-small-3.2-24b-instruct-2506",
 858      "name": "Mistral Small 3.2 24B Instruct 2506",
 859      "cost_per_1m_in": 0.095,
 860      "cost_per_1m_out": 0.294,
 861      "cost_per_1m_in_cached": 0,
 862      "cost_per_1m_out_cached": 0,
 863      "context_window": 128000,
 864      "default_max_tokens": 12800,
 865      "can_reason": false,
 866      "supports_attachments": true
 867    },
 868    {
 869      "id": "qwen3-32b",
 870      "name": "Qwen3 32B",
 871      "cost_per_1m_in": 0.093,
 872      "cost_per_1m_out": 0.281,
 873      "cost_per_1m_in_cached": 0,
 874      "cost_per_1m_out_cached": 0,
 875      "context_window": 40000,
  876      "default_max_tokens": 4000,
 877      "can_reason": true,
 878      "reasoning_levels": [
 879        "low",
 880        "medium",
 881        "high"
 882      ],
 883      "default_reasoning_effort": "medium",
 884      "supports_attachments": false
 885    },
 886    {
 887      "id": "qwen3-235b-a22b-instruct-2507",
 888      "name": "Qwen3 235B A22B Instruct 2507",
 889      "cost_per_1m_in": 0.065,
 890      "cost_per_1m_out": 0.429,
 891      "cost_per_1m_in_cached": 0,
 892      "cost_per_1m_out_cached": 0,
 893      "context_window": 131000,
 894      "default_max_tokens": 13100,
 895      "can_reason": true,
 896      "reasoning_levels": [
 897        "low",
 898        "medium",
 899        "high"
 900      ],
 901      "default_reasoning_effort": "medium",
 902      "supports_attachments": false
 903    },
 904    {
 905      "id": "qwen3-coder-30b-a3b-instruct",
 906      "name": "Qwen3 Coder 30b a3b Instruct",
 907      "cost_per_1m_in": 0.056,
 908      "cost_per_1m_out": 0.233,
 909      "cost_per_1m_in_cached": 0,
 910      "cost_per_1m_out_cached": 0,
 911      "context_window": 262000,
  912      "default_max_tokens": 26200,
 913      "can_reason": true,
 914      "reasoning_levels": [
 915        "low",
 916        "medium",
 917        "high"
 918      ],
 919      "default_reasoning_effort": "medium",
 920      "supports_attachments": false
 921    },
 922    {
 923      "id": "gpt-4.1",
 924      "name": "GPT 4.1",
 925      "cost_per_1m_in": 2.066,
 926      "cost_per_1m_out": 8.266,
 927      "cost_per_1m_in_cached": 0,
 928      "cost_per_1m_out_cached": 0,
 929      "context_window": 1047576,
 930      "default_max_tokens": 104757,
 931      "can_reason": true,
 932      "reasoning_levels": [
 933        "low",
 934        "medium",
 935        "high"
 936      ],
 937      "default_reasoning_effort": "medium",
 938      "supports_attachments": true
 939    },
 940    {
 941      "id": "gpt-4.1-mini",
 942      "name": "GPT 4.1 mini",
 943      "cost_per_1m_in": 0.41,
 944      "cost_per_1m_out": 1.607,
 945      "cost_per_1m_in_cached": 0,
 946      "cost_per_1m_out_cached": 0,
 947      "context_window": 1047576,
 948      "default_max_tokens": 104757,
 949      "can_reason": true,
 950      "reasoning_levels": [
 951        "low",
 952        "medium",
 953        "high"
 954      ],
 955      "default_reasoning_effort": "medium",
 956      "supports_attachments": true
 957    },
 958    {
 959      "id": "gpt-4.1-nano",
 960      "name": "GPT 4.1 nano",
 961      "cost_per_1m_in": 0.105,
 962      "cost_per_1m_out": 0.41,
 963      "cost_per_1m_in_cached": 0,
 964      "cost_per_1m_out_cached": 0,
 965      "context_window": 1047576,
 966      "default_max_tokens": 104757,
 967      "can_reason": true,
 968      "reasoning_levels": [
 969        "low",
 970        "medium",
 971        "high"
 972      ],
 973      "default_reasoning_effort": "medium",
 974      "supports_attachments": true
 975    },
 976    {
 977      "id": "nova-micro-v1",
 978      "name": "Nova Micro 1.0",
 979      "cost_per_1m_in": 0.038,
 980      "cost_per_1m_out": 0.15,
 981      "cost_per_1m_in_cached": 0,
 982      "cost_per_1m_out_cached": 0,
 983      "context_window": 128000,
 984      "default_max_tokens": 12800,
 985      "can_reason": true,
 986      "reasoning_levels": [
 987        "low",
 988        "medium",
 989        "high"
 990      ],
 991      "default_reasoning_effort": "medium",
 992      "supports_attachments": true
 993    },
 994    {
 995      "id": "nova-lite-v1",
 996      "name": "Nova Lite 1.0",
 997      "cost_per_1m_in": 0.065,
 998      "cost_per_1m_out": 0.259,
 999      "cost_per_1m_in_cached": 0,
1000      "cost_per_1m_out_cached": 0,
1001      "context_window": 300000,
1002      "default_max_tokens": 30000,
1003      "can_reason": true,
1004      "reasoning_levels": [
1005        "low",
1006        "medium",
1007        "high"
1008      ],
1009      "default_reasoning_effort": "medium",
1010      "supports_attachments": true
1011    },
1012    {
1013      "id": "nova-pro-v1",
1014      "name": "Nova Pro 1.0",
1015      "cost_per_1m_in": 0.865,
1016      "cost_per_1m_out": 3.46,
1017      "cost_per_1m_in_cached": 0,
1018      "cost_per_1m_out_cached": 0,
1019      "context_window": 300000,
1020      "default_max_tokens": 30000,
1021      "can_reason": true,
1022      "reasoning_levels": [
1023        "low",
1024        "medium",
1025        "high"
1026      ],
1027      "default_reasoning_effort": "medium",
1028      "supports_attachments": true
1029    },
1030    {
1031      "id": "claude-sonnet-4",
1032      "name": "Claude Sonnet 4",
1033      "cost_per_1m_in": 2.817,
1034      "cost_per_1m_out": 14.087,
1035      "cost_per_1m_in_cached": 0,
1036      "cost_per_1m_out_cached": 0,
1037      "context_window": 200000,
1038      "default_max_tokens": 20000,
1039      "can_reason": true,
1040      "reasoning_levels": [
1041        "low",
1042        "medium",
1043        "high"
1044      ],
1045      "default_reasoning_effort": "medium",
1046      "supports_attachments": true
1047    },
1048    {
1049      "id": "claude-3-7-sonnet",
1050      "name": "Claude 3.7 Sonnet",
1051      "cost_per_1m_in": 2.817,
1052      "cost_per_1m_out": 14.087,
1053      "cost_per_1m_in_cached": 0,
1054      "cost_per_1m_out_cached": 0,
1055      "context_window": 200000,
1056      "default_max_tokens": 20000,
1057      "can_reason": true,
1058      "reasoning_levels": [
1059        "low",
1060        "medium",
1061        "high"
1062      ],
1063      "default_reasoning_effort": "medium",
1064      "supports_attachments": true
1065    },
1066    {
1067      "id": "llama-3.1-nemotron-ultra-253b-v1",
1068      "name": "Llama 3.1 Nemotron Ultra 253B v1",
1069      "cost_per_1m_in": 0.564,
1070      "cost_per_1m_out": 1.691,
1071      "cost_per_1m_in_cached": 0,
1072      "cost_per_1m_out_cached": 0,
1073      "context_window": 128000,
1074      "default_max_tokens": 12800,
1075      "can_reason": true,
1076      "reasoning_levels": [
1077        "low",
1078        "medium",
1079        "high"
1080      ],
1081      "default_reasoning_effort": "medium",
1082      "supports_attachments": false
1083    },
1084    {
1085      "id": "llama-4-maverick",
1086      "name": "Llama 4 Maverick",
1087      "cost_per_1m_in": 0.13,
1088      "cost_per_1m_out": 0.633,
1089      "cost_per_1m_in_cached": 0,
1090      "cost_per_1m_out_cached": 0,
1091      "context_window": 1050000,
1092      "default_max_tokens": 105000,
1093      "can_reason": false,
1094      "supports_attachments": false
1095    },
1096    {
1097      "id": "deepseek-v3-0324",
1098      "name": "DeepSeek V3 0324",
1099      "cost_per_1m_in": 0.28,
1100      "cost_per_1m_out": 0.932,
1101      "cost_per_1m_in_cached": 0,
1102      "cost_per_1m_out_cached": 0,
1103      "context_window": 163840,
      "default_max_tokens": 16384,
1105      "can_reason": true,
1106      "reasoning_levels": [
1107        "low",
1108        "medium",
1109        "high"
1110      ],
1111      "default_reasoning_effort": "medium",
1112      "supports_attachments": false
1113    },
1114    {
1115      "id": "mistral-small-2503",
1116      "name": "Mistral Small 2503",
1117      "cost_per_1m_in": 0.105,
1118      "cost_per_1m_out": 0.315,
1119      "cost_per_1m_in_cached": 0,
1120      "cost_per_1m_out_cached": 0,
1121      "context_window": 128000,
1122      "default_max_tokens": 12800,
1123      "can_reason": false,
1124      "supports_attachments": true
1125    },
1126    {
1127      "id": "mistral-small-2506",
1128      "name": "Mistral Small 2506",
1129      "cost_per_1m_in": 0.105,
1130      "cost_per_1m_out": 0.315,
1131      "cost_per_1m_in_cached": 0,
1132      "cost_per_1m_out_cached": 0,
1133      "context_window": 131072,
1134      "default_max_tokens": 13107,
1135      "can_reason": false,
1136      "supports_attachments": true
1137    },
1138    {
1139      "id": "gemini-2.0-flash-001",
1140      "name": "Gemini 2.0 Flash",
1141      "cost_per_1m_in": 0.141,
1142      "cost_per_1m_out": 0.563,
1143      "cost_per_1m_in_cached": 0,
1144      "cost_per_1m_out_cached": 0,
1145      "context_window": 1048576,
1146      "default_max_tokens": 104857,
1147      "can_reason": false,
1148      "supports_attachments": true
1149    },
1150    {
1151      "id": "gemini-2.0-flash-lite-001",
1152      "name": "Gemini 2.0 Flash Lite",
1153      "cost_per_1m_in": 0.07,
1154      "cost_per_1m_out": 0.282,
1155      "cost_per_1m_in_cached": 0,
1156      "cost_per_1m_out_cached": 0,
1157      "context_window": 1048576,
1158      "default_max_tokens": 104857,
1159      "can_reason": false,
1160      "supports_attachments": true
1161    },
1162    {
1163      "id": "gemini-2.5-flash",
1164      "name": "Gemini 2.5 Flash",
1165      "cost_per_1m_in": 0.282,
1166      "cost_per_1m_out": 2.348,
1167      "cost_per_1m_in_cached": 0,
1168      "cost_per_1m_out_cached": 0,
1169      "context_window": 1048576,
1170      "default_max_tokens": 104857,
1171      "can_reason": true,
1172      "reasoning_levels": [
1173        "low",
1174        "medium",
1175        "high"
1176      ],
1177      "default_reasoning_effort": "medium",
1178      "supports_attachments": true
1179    },
1180    {
1181      "id": "gemini-2.5-pro",
1182      "name": "Gemini 2.5 Pro",
1183      "cost_per_1m_in": 1.409,
1184      "cost_per_1m_out": 9.391,
1185      "cost_per_1m_in_cached": 0,
1186      "cost_per_1m_out_cached": 0,
1187      "context_window": 1048576,
1188      "default_max_tokens": 104857,
1189      "can_reason": true,
1190      "reasoning_levels": [
1191        "low",
1192        "medium",
1193        "high"
1194      ],
1195      "default_reasoning_effort": "medium",
1196      "supports_attachments": true
1197    },
1198    {
1199      "id": "gemma-3-27b-it",
1200      "name": "Gemma 3 27b it",
1201      "cost_per_1m_in": 0.093,
1202      "cost_per_1m_out": 0.281,
1203      "cost_per_1m_in_cached": 0,
1204      "cost_per_1m_out_cached": 0,
1205      "context_window": 131000,
1206      "default_max_tokens": 4000,
1207      "can_reason": true,
1208      "reasoning_levels": [
1209        "low",
1210        "medium",
1211        "high"
1212      ],
1213      "default_reasoning_effort": "medium",
1214      "supports_attachments": true
1215    },
1216    {
1217      "id": "deepseek-r1-distill-llama-70b",
1218      "name": "Deepseek R1 Distill LLama 70B",
1219      "cost_per_1m_in": 0.704,
1220      "cost_per_1m_out": 0.704,
1221      "cost_per_1m_in_cached": 0,
1222      "cost_per_1m_out_cached": 0,
1223      "context_window": 131000,
1224      "default_max_tokens": 1600,
1225      "can_reason": true,
1226      "reasoning_levels": [
1227        "low",
1228        "medium",
1229        "high"
1230      ],
1231      "default_reasoning_effort": "medium",
1232      "supports_attachments": false
1233    },
1234    {
1235      "id": "deepseek-r1-0528",
1236      "name": "DeepSeek R1 0528",
1237      "cost_per_1m_in": 0.752,
1238      "cost_per_1m_out": 2.254,
1239      "cost_per_1m_in_cached": 0,
1240      "cost_per_1m_out_cached": 0,
1241      "context_window": 164000,
1242      "default_max_tokens": 16400,
1243      "can_reason": true,
1244      "reasoning_levels": [
1245        "low",
1246        "medium",
1247        "high"
1248      ],
1249      "default_reasoning_effort": "medium",
1250      "supports_attachments": false
1251    },
1252    {
1253      "id": "codestral-2508",
1254      "name": "Codestral 25.08",
1255      "cost_per_1m_in": 0.315,
1256      "cost_per_1m_out": 0.945,
1257      "cost_per_1m_in_cached": 0,
1258      "cost_per_1m_out_cached": 0,
1259      "context_window": 256000,
1260      "default_max_tokens": 25600,
1261      "can_reason": false,
1262      "supports_attachments": false
1263    },
1264    {
1265      "id": "llama-3.3-70b-instruct",
1266      "name": "Llama 3.3 70B Instruct",
1267      "cost_per_1m_in": 0.093,
1268      "cost_per_1m_out": 0.289,
1269      "cost_per_1m_in_cached": 0,
1270      "cost_per_1m_out_cached": 0,
1271      "context_window": 131000,
1272      "default_max_tokens": 12800,
1273      "can_reason": true,
1274      "reasoning_levels": [
1275        "low",
1276        "medium",
1277        "high"
1278      ],
1279      "default_reasoning_effort": "medium",
1280      "supports_attachments": false
1281    },
1282    {
1283      "id": "gpt-4o",
1284      "name": "GPT 4o",
1285      "cost_per_1m_in": 2.506,
1286      "cost_per_1m_out": 10.024,
1287      "cost_per_1m_in_cached": 0,
1288      "cost_per_1m_out_cached": 0,
1289      "context_window": 128000,
1290      "default_max_tokens": 12800,
1291      "can_reason": true,
1292      "reasoning_levels": [
1293        "low",
1294        "medium",
1295        "high"
1296      ],
1297      "default_reasoning_effort": "medium",
1298      "supports_attachments": true
1299    },
1300    {
1301      "id": "gpt-5-mini",
1302      "name": "GPT 5 mini",
1303      "cost_per_1m_in": 0.263,
1304      "cost_per_1m_out": 2.066,
1305      "cost_per_1m_in_cached": 0,
1306      "cost_per_1m_out_cached": 0,
1307      "context_window": 400000,
1308      "default_max_tokens": 40000,
1309      "can_reason": true,
1310      "reasoning_levels": [
1311        "low",
1312        "medium",
1313        "high"
1314      ],
1315      "default_reasoning_effort": "medium",
1316      "supports_attachments": true
1317    },
1318    {
1319      "id": "gpt-5-nano",
1320      "name": "GPT 5 nano",
1321      "cost_per_1m_in": 0.057,
1322      "cost_per_1m_out": 0.414,
1323      "cost_per_1m_in_cached": 0,
1324      "cost_per_1m_out_cached": 0,
1325      "context_window": 400000,
1326      "default_max_tokens": 40000,
1327      "can_reason": true,
1328      "reasoning_levels": [
1329        "low",
1330        "medium",
1331        "high"
1332      ],
1333      "default_reasoning_effort": "medium",
1334      "supports_attachments": true
1335    },
1336    {
1337      "id": "mistral-large-2411",
1338      "name": "Mistral Large 2411",
1339      "cost_per_1m_in": 1.89,
1340      "cost_per_1m_out": 5.67,
1341      "cost_per_1m_in_cached": 0,
1342      "cost_per_1m_out_cached": 0,
1343      "context_window": 131072,
1344      "default_max_tokens": 13107,
1345      "can_reason": true,
1346      "reasoning_levels": [
1347        "low",
1348        "medium",
1349        "high"
1350      ],
1351      "default_reasoning_effort": "medium",
1352      "supports_attachments": false
1353    },
1354    {
1355      "id": "hermes-4-405b",
1356      "name": "Hermes 4 405B",
1357      "cost_per_1m_in": 0.939,
1358      "cost_per_1m_out": 2.817,
1359      "cost_per_1m_in_cached": 0,
1360      "cost_per_1m_out_cached": 0,
1361      "context_window": 128000,
1362      "default_max_tokens": 12800,
1363      "can_reason": false,
1364      "supports_attachments": false
1365    },
1366    {
1367      "id": "mistral-nemo-instruct-2407",
1368      "name": "Mistral Nemo 2407",
1369      "cost_per_1m_in": 0.137,
1370      "cost_per_1m_out": 0.137,
1371      "cost_per_1m_in_cached": 0,
1372      "cost_per_1m_out_cached": 0,
1373      "context_window": 131072,
      "default_max_tokens": 13107,
1375      "can_reason": false,
1376      "supports_attachments": false
1377    },
1378    {
1379      "id": "devstral-medium-2507",
1380      "name": "Devstral Medium 2507",
1381      "cost_per_1m_in": 0.42,
1382      "cost_per_1m_out": 2.1,
1383      "cost_per_1m_in_cached": 0,
1384      "cost_per_1m_out_cached": 0,
1385      "context_window": 131072,
1386      "default_max_tokens": 13107,
1387      "can_reason": false,
1388      "supports_attachments": false
1389    },
1390    {
1391      "id": "devstral-small-2507",
1392      "name": "Devstral Small 2507",
1393      "cost_per_1m_in": 0.105,
1394      "cost_per_1m_out": 0.315,
1395      "cost_per_1m_in_cached": 0,
1396      "cost_per_1m_out_cached": 0,
1397      "context_window": 131072,
1398      "default_max_tokens": 13107,
1399      "can_reason": false,
1400      "supports_attachments": false
1401    },
1402    {
1403      "id": "mistral-medium-2508",
1404      "name": "Mistral Medium 2508",
1405      "cost_per_1m_in": 0.42,
1406      "cost_per_1m_out": 2.1,
1407      "cost_per_1m_in_cached": 0,
1408      "cost_per_1m_out_cached": 0,
1409      "context_window": 128000,
1410      "default_max_tokens": 12800,
1411      "can_reason": true,
1412      "reasoning_levels": [
1413        "low",
1414        "medium",
1415        "high"
1416      ],
1417      "default_reasoning_effort": "medium",
1418      "supports_attachments": true
1419    },
1420    {
1421      "id": "llama-3.1-405b-instruct",
1422      "name": "Llama 3.1 405B Instruct",
1423      "cost_per_1m_in": 1.838,
1424      "cost_per_1m_out": 1.838,
1425      "cost_per_1m_in_cached": 0,
1426      "cost_per_1m_out_cached": 0,
1427      "context_window": 128000,
1428      "default_max_tokens": 12800,
1429      "can_reason": true,
1430      "reasoning_levels": [
1431        "low",
1432        "medium",
1433        "high"
1434      ],
1435      "default_reasoning_effort": "medium",
1436      "supports_attachments": false
1437    },
1438    {
1439      "id": "gpt-4o-mini",
1440      "name": "GPT 4o mini",
1441      "cost_per_1m_in": 0.15,
1442      "cost_per_1m_out": 0.601,
1443      "cost_per_1m_in_cached": 0,
1444      "cost_per_1m_out_cached": 0,
1445      "context_window": 128000,
1446      "default_max_tokens": 12800,
1447      "can_reason": true,
1448      "reasoning_levels": [
1449        "low",
1450        "medium",
1451        "high"
1452      ],
1453      "default_reasoning_effort": "medium",
1454      "supports_attachments": true
1455    },
1456    {
1457      "id": "llama-3.1-8b-instruct",
1458      "name": "Llama 3.1 8B Instruct",
1459      "cost_per_1m_in": 0.019,
1460      "cost_per_1m_out": 0.057,
1461      "cost_per_1m_in_cached": 0,
1462      "cost_per_1m_out_cached": 0,
1463      "context_window": 128000,
1464      "default_max_tokens": 12800,
1465      "can_reason": true,
1466      "reasoning_levels": [
1467        "low",
1468        "medium",
1469        "high"
1470      ],
1471      "default_reasoning_effort": "medium",
1472      "supports_attachments": false
1473    }
1474  ]
1475}