openrouter.json

   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openai",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "qwen/qwen3-30b-a3b-thinking-2507",
  12      "name": "Qwen: Qwen3 30B A3B Thinking 2507",
  13      "cost_per_1m_in": 0.1,
  14      "cost_per_1m_out": 0.3,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 262144,
  18      "default_max_tokens": 26214,
  19      "can_reason": true,
  20      "has_reasoning_efforts": false,
  21      "supports_attachments": false
  22    },
  23    {
  24      "id": "x-ai/grok-code-fast-1",
  25      "name": "xAI: Grok Code Fast 1",
  26      "cost_per_1m_in": 0.2,
  27      "cost_per_1m_out": 1.5,
  28      "cost_per_1m_in_cached": 0,
  29      "cost_per_1m_out_cached": 0.02,
  30      "context_window": 256000,
  31      "default_max_tokens": 5000,
  32      "can_reason": true,
  33      "has_reasoning_efforts": false,
  34      "supports_attachments": false
  35    },
  36    {
  37      "id": "nousresearch/hermes-4-70b",
  38      "name": "Nous: Hermes 4 70B",
  39      "cost_per_1m_in": 0.09329544,
  40      "cost_per_1m_out": 0.3733632,
  41      "cost_per_1m_in_cached": 0,
  42      "cost_per_1m_out_cached": 0,
  43      "context_window": 131072,
  44      "default_max_tokens": 13107,
  45      "can_reason": true,
  46      "has_reasoning_efforts": false,
  47      "supports_attachments": false
  48    },
  49    {
  50      "id": "nousresearch/hermes-4-405b",
  51      "name": "Nous: Hermes 4 405B",
  52      "cost_per_1m_in": 0.1999188,
  53      "cost_per_1m_out": 0.800064,
  54      "cost_per_1m_in_cached": 0,
  55      "cost_per_1m_out_cached": 0,
  56      "context_window": 131072,
  57      "default_max_tokens": 13107,
  58      "can_reason": true,
  59      "has_reasoning_efforts": false,
  60      "supports_attachments": false
  61    },
  62    {
  63      "id": "deepseek/deepseek-chat-v3.1:free",
  64      "name": "DeepSeek: DeepSeek V3.1 (free)",
  65      "cost_per_1m_in": 0,
  66      "cost_per_1m_out": 0,
  67      "cost_per_1m_in_cached": 0,
  68      "cost_per_1m_out_cached": 0,
  69      "context_window": 163840,
  70      "default_max_tokens": 16384,
  71      "can_reason": true,
  72      "has_reasoning_efforts": false,
  73      "supports_attachments": false
  74    },
  75    {
  76      "id": "deepseek/deepseek-chat-v3.1",
  77      "name": "DeepSeek: DeepSeek V3.1",
  78      "cost_per_1m_in": 0.3,
  79      "cost_per_1m_out": 1,
  80      "cost_per_1m_in_cached": 0,
  81      "cost_per_1m_out_cached": 0,
  82      "context_window": 163840,
  83      "default_max_tokens": 16384,
  84      "can_reason": true,
  85      "has_reasoning_efforts": false,
  86      "supports_attachments": false
  87    },
  88    {
  89      "id": "openai/gpt-4o-audio-preview",
  90      "name": "OpenAI: GPT-4o Audio",
  91      "cost_per_1m_in": 2.5,
  92      "cost_per_1m_out": 10,
  93      "cost_per_1m_in_cached": 0,
  94      "cost_per_1m_out_cached": 0,
  95      "context_window": 128000,
  96      "default_max_tokens": 8192,
  97      "can_reason": false,
  98      "has_reasoning_efforts": false,
  99      "supports_attachments": false
 100    },
 101    {
 102      "id": "mistralai/mistral-medium-3.1",
 103      "name": "Mistral: Mistral Medium 3.1",
 104      "cost_per_1m_in": 0.4,
 105      "cost_per_1m_out": 2,
 106      "cost_per_1m_in_cached": 0,
 107      "cost_per_1m_out_cached": 0,
 108      "context_window": 131072,
 109      "default_max_tokens": 13107,
 110      "can_reason": false,
 111      "has_reasoning_efforts": false,
 112      "supports_attachments": true
 113    },
 114    {
 115      "id": "z-ai/glm-4.5v",
 116      "name": "Z.AI: GLM 4.5V",
 117      "cost_per_1m_in": 0.6,
 118      "cost_per_1m_out": 1.8,
 119      "cost_per_1m_in_cached": 0,
 120      "cost_per_1m_out_cached": 0.11,
 121      "context_window": 65536,
 122      "default_max_tokens": 8192,
 123      "can_reason": true,
 124      "has_reasoning_efforts": false,
 125      "supports_attachments": true
 126    },
 127    {
 128      "id": "ai21/jamba-mini-1.7",
 129      "name": "AI21: Jamba Mini 1.7",
 130      "cost_per_1m_in": 0.2,
 131      "cost_per_1m_out": 0.4,
 132      "cost_per_1m_in_cached": 0,
 133      "cost_per_1m_out_cached": 0,
 134      "context_window": 256000,
 135      "default_max_tokens": 2048,
 136      "can_reason": false,
 137      "has_reasoning_efforts": false,
 138      "supports_attachments": false
 139    },
 140    {
 141      "id": "ai21/jamba-large-1.7",
 142      "name": "AI21: Jamba Large 1.7",
 143      "cost_per_1m_in": 2,
 144      "cost_per_1m_out": 8,
 145      "cost_per_1m_in_cached": 0,
 146      "cost_per_1m_out_cached": 0,
 147      "context_window": 256000,
 148      "default_max_tokens": 2048,
 149      "can_reason": false,
 150      "has_reasoning_efforts": false,
 151      "supports_attachments": false
 152    },
 153    {
 154      "id": "openai/gpt-5",
 155      "name": "OpenAI: GPT-5",
 156      "cost_per_1m_in": 1.25,
 157      "cost_per_1m_out": 10,
 158      "cost_per_1m_in_cached": 0,
 159      "cost_per_1m_out_cached": 0.125,
 160      "context_window": 400000,
 161      "default_max_tokens": 64000,
 162      "can_reason": true,
 163      "has_reasoning_efforts": false,
 164      "supports_attachments": true
 165    },
 166    {
 167      "id": "openai/gpt-5-mini",
 168      "name": "OpenAI: GPT-5 Mini",
 169      "cost_per_1m_in": 0.25,
 170      "cost_per_1m_out": 2,
 171      "cost_per_1m_in_cached": 0,
 172      "cost_per_1m_out_cached": 0.025,
 173      "context_window": 400000,
 174      "default_max_tokens": 64000,
 175      "can_reason": true,
 176      "has_reasoning_efforts": false,
 177      "supports_attachments": true
 178    },
 179    {
 180      "id": "openai/gpt-5-nano",
 181      "name": "OpenAI: GPT-5 Nano",
 182      "cost_per_1m_in": 0.05,
 183      "cost_per_1m_out": 0.4,
 184      "cost_per_1m_in_cached": 0,
 185      "cost_per_1m_out_cached": 0.005,
 186      "context_window": 400000,
 187      "default_max_tokens": 64000,
 188      "can_reason": true,
 189      "has_reasoning_efforts": false,
 190      "supports_attachments": true
 191    },
 192    {
 193      "id": "openai/gpt-oss-120b",
 194      "name": "OpenAI: gpt-oss-120b",
 195      "cost_per_1m_in": 0.1,
 196      "cost_per_1m_out": 0.5,
 197      "cost_per_1m_in_cached": 0,
 198      "cost_per_1m_out_cached": 0,
 199      "context_window": 131072,
 200      "default_max_tokens": 13107,
 201      "can_reason": true,
 202      "has_reasoning_efforts": false,
 203      "supports_attachments": false
 204    },
 205    {
 206      "id": "openai/gpt-oss-20b",
 207      "name": "OpenAI: gpt-oss-20b",
 208      "cost_per_1m_in": 0.04,
 209      "cost_per_1m_out": 0.16,
 210      "cost_per_1m_in_cached": 0,
 211      "cost_per_1m_out_cached": 0,
 212      "context_window": 131072,
 213      "default_max_tokens": 13107,
 214      "can_reason": true,
 215      "has_reasoning_efforts": false,
 216      "supports_attachments": false
 217    },
 218    {
 219      "id": "anthropic/claude-opus-4.1",
 220      "name": "Anthropic: Claude Opus 4.1",
 221      "cost_per_1m_in": 15,
 222      "cost_per_1m_out": 75,
 223      "cost_per_1m_in_cached": 18.75,
 224      "cost_per_1m_out_cached": 1.5,
 225      "context_window": 200000,
 226      "default_max_tokens": 16000,
 227      "can_reason": true,
 228      "has_reasoning_efforts": false,
 229      "supports_attachments": true
 230    },
 231    {
 232      "id": "mistralai/codestral-2508",
 233      "name": "Mistral: Codestral 2508",
 234      "cost_per_1m_in": 0.3,
 235      "cost_per_1m_out": 0.9,
 236      "cost_per_1m_in_cached": 0,
 237      "cost_per_1m_out_cached": 0,
 238      "context_window": 256000,
 239      "default_max_tokens": 25600,
 240      "can_reason": false,
 241      "has_reasoning_efforts": false,
 242      "supports_attachments": false
 243    },
 244    {
 245      "id": "qwen/qwen3-coder-30b-a3b-instruct",
 246      "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
 247      "cost_per_1m_in": 0.0518308,
 248      "cost_per_1m_out": 0.207424,
 249      "cost_per_1m_in_cached": 0,
 250      "cost_per_1m_out_cached": 0,
 251      "context_window": 262144,
 252      "default_max_tokens": 26214,
 253      "can_reason": false,
 254      "has_reasoning_efforts": false,
 255      "supports_attachments": false
 256    },
 257    {
 258      "id": "z-ai/glm-4.5",
 259      "name": "Z.AI: GLM 4.5",
 260      "cost_per_1m_in": 0.6,
 261      "cost_per_1m_out": 2.2,
 262      "cost_per_1m_in_cached": 0,
 263      "cost_per_1m_out_cached": 0,
 264      "context_window": 131072,
 265      "default_max_tokens": 65536,
 266      "can_reason": true,
 267      "has_reasoning_efforts": false,
 268      "supports_attachments": false
 269    },
 270    {
 271      "id": "z-ai/glm-4.5-air:free",
 272      "name": "Z.AI: GLM 4.5 Air (free)",
 273      "cost_per_1m_in": 0,
 274      "cost_per_1m_out": 0,
 275      "cost_per_1m_in_cached": 0,
 276      "cost_per_1m_out_cached": 0,
 277      "context_window": 131072,
 278      "default_max_tokens": 48000,
 279      "can_reason": true,
 280      "has_reasoning_efforts": false,
 281      "supports_attachments": false
 282    },
 283    {
 284      "id": "z-ai/glm-4.5-air",
 285      "name": "Z.AI: GLM 4.5 Air",
 286      "cost_per_1m_in": 0.14,
 287      "cost_per_1m_out": 0.86,
 288      "cost_per_1m_in_cached": 0,
 289      "cost_per_1m_out_cached": 0,
 290      "context_window": 131072,
 291      "default_max_tokens": 65536,
 292      "can_reason": true,
 293      "has_reasoning_efforts": false,
 294      "supports_attachments": false
 295    },
 296    {
 297      "id": "qwen/qwen3-235b-a22b-thinking-2507",
 298      "name": "Qwen: Qwen3 235B A22B Thinking 2507",
 299      "cost_per_1m_in": 0.077968332,
 300      "cost_per_1m_out": 0.31202496,
 301      "cost_per_1m_in_cached": 0,
 302      "cost_per_1m_out_cached": 0,
 303      "context_window": 262144,
 304      "default_max_tokens": 26214,
 305      "can_reason": true,
 306      "has_reasoning_efforts": false,
 307      "supports_attachments": false
 308    },
 309    {
 310      "id": "z-ai/glm-4-32b",
 311      "name": "Z.AI: GLM 4 32B",
 312      "cost_per_1m_in": 0.1,
 313      "cost_per_1m_out": 0.1,
 314      "cost_per_1m_in_cached": 0,
 315      "cost_per_1m_out_cached": 0,
 316      "context_window": 128000,
 317      "default_max_tokens": 12800,
 318      "can_reason": false,
 319      "has_reasoning_efforts": false,
 320      "supports_attachments": false
 321    },
 322    {
 323      "id": "qwen/qwen3-coder:free",
 324      "name": "Qwen: Qwen3 Coder 480B A35B (free)",
 325      "cost_per_1m_in": 0,
 326      "cost_per_1m_out": 0,
 327      "cost_per_1m_in_cached": 0,
 328      "cost_per_1m_out_cached": 0,
 329      "context_window": 262144,
 330      "default_max_tokens": 26214,
 331      "can_reason": false,
 332      "has_reasoning_efforts": false,
 333      "supports_attachments": false
 334    },
 335    {
 336      "id": "qwen/qwen3-coder",
 337      "name": "Qwen: Qwen3 Coder 480B A35B",
 338      "cost_per_1m_in": 0.3,
 339      "cost_per_1m_out": 1.2,
 340      "cost_per_1m_in_cached": 0,
 341      "cost_per_1m_out_cached": 0,
 342      "context_window": 262144,
 343      "default_max_tokens": 26214,
 344      "can_reason": false,
 345      "has_reasoning_efforts": false,
 346      "supports_attachments": false
 347    },
 348    {
 349      "id": "google/gemini-2.5-flash-lite",
 350      "name": "Google: Gemini 2.5 Flash Lite",
 351      "cost_per_1m_in": 0.1,
 352      "cost_per_1m_out": 0.4,
 353      "cost_per_1m_in_cached": 0.1833,
 354      "cost_per_1m_out_cached": 0.025,
 355      "context_window": 1048576,
 356      "default_max_tokens": 32767,
 357      "can_reason": true,
 358      "has_reasoning_efforts": false,
 359      "supports_attachments": true
 360    },
 361    {
 362      "id": "qwen/qwen3-235b-a22b-2507",
 363      "name": "Qwen: Qwen3 235B A22B Instruct 2507",
 364      "cost_per_1m_in": 0.15,
 365      "cost_per_1m_out": 0.8,
 366      "cost_per_1m_in_cached": 0,
 367      "cost_per_1m_out_cached": 0,
 368      "context_window": 262144,
 369      "default_max_tokens": 131072,
 370      "can_reason": false,
 371      "has_reasoning_efforts": false,
 372      "supports_attachments": false
 373    },
 374    {
 375      "id": "moonshotai/kimi-k2:free",
 376      "name": "MoonshotAI: Kimi K2 (free)",
 377      "cost_per_1m_in": 0,
 378      "cost_per_1m_out": 0,
 379      "cost_per_1m_in_cached": 0,
 380      "cost_per_1m_out_cached": 0,
 381      "context_window": 32768,
 382      "default_max_tokens": 3276,
 383      "can_reason": false,
 384      "has_reasoning_efforts": false,
 385      "supports_attachments": false
 386    },
 387    {
 388      "id": "moonshotai/kimi-k2",
 389      "name": "MoonshotAI: Kimi K2",
 390      "cost_per_1m_in": 0.5,
 391      "cost_per_1m_out": 2.4,
 392      "cost_per_1m_in_cached": 0,
 393      "cost_per_1m_out_cached": 0,
 394      "context_window": 131072,
 395      "default_max_tokens": 13107,
 396      "can_reason": false,
 397      "has_reasoning_efforts": false,
 398      "supports_attachments": false
 399    },
 400    {
 401      "id": "mistralai/devstral-medium",
 402      "name": "Mistral: Devstral Medium",
 403      "cost_per_1m_in": 0.4,
 404      "cost_per_1m_out": 2,
 405      "cost_per_1m_in_cached": 0,
 406      "cost_per_1m_out_cached": 0,
 407      "context_window": 131072,
 408      "default_max_tokens": 13107,
 409      "can_reason": false,
 410      "has_reasoning_efforts": false,
 411      "supports_attachments": false
 412    },
 413    {
 414      "id": "x-ai/grok-4",
 415      "name": "xAI: Grok 4",
 416      "cost_per_1m_in": 3,
 417      "cost_per_1m_out": 15,
 418      "cost_per_1m_in_cached": 0,
 419      "cost_per_1m_out_cached": 0.75,
 420      "context_window": 256000,
 421      "default_max_tokens": 25600,
 422      "can_reason": true,
 423      "has_reasoning_efforts": false,
 424      "supports_attachments": true
 425    },
 426    {
 427      "id": "inception/mercury",
 428      "name": "Inception: Mercury",
 429      "cost_per_1m_in": 0.25,
 430      "cost_per_1m_out": 1,
 431      "cost_per_1m_in_cached": 0,
 432      "cost_per_1m_out_cached": 0,
 433      "context_window": 128000,
 434      "default_max_tokens": 8192,
 435      "can_reason": false,
 436      "has_reasoning_efforts": false,
 437      "supports_attachments": false
 438    },
 439    {
 440      "id": "mistralai/mistral-small-3.2-24b-instruct:free",
 441      "name": "Mistral: Mistral Small 3.2 24B (free)",
 442      "cost_per_1m_in": 0,
 443      "cost_per_1m_out": 0,
 444      "cost_per_1m_in_cached": 0,
 445      "cost_per_1m_out_cached": 0,
 446      "context_window": 131072,
 447      "default_max_tokens": 13107,
 448      "can_reason": false,
 449      "has_reasoning_efforts": false,
 450      "supports_attachments": true
 451    },
 452    {
 453      "id": "mistralai/mistral-small-3.2-24b-instruct",
 454      "name": "Mistral: Mistral Small 3.2 24B",
 455      "cost_per_1m_in": 0.1,
 456      "cost_per_1m_out": 0.3,
 457      "cost_per_1m_in_cached": 0,
 458      "cost_per_1m_out_cached": 0,
 459      "context_window": 131072,
 460      "default_max_tokens": 13107,
 461      "can_reason": false,
 462      "has_reasoning_efforts": false,
 463      "supports_attachments": true
 464    },
 465    {
 466      "id": "google/gemini-2.5-flash-lite-preview-06-17",
 467      "name": "Google: Gemini 2.5 Flash Lite Preview 06-17",
 468      "cost_per_1m_in": 0.1,
 469      "cost_per_1m_out": 0.4,
 470      "cost_per_1m_in_cached": 0.1833,
 471      "cost_per_1m_out_cached": 0.025,
 472      "context_window": 1048576,
 473      "default_max_tokens": 32767,
 474      "can_reason": true,
 475      "has_reasoning_efforts": false,
 476      "supports_attachments": true
 477    },
 478    {
 479      "id": "google/gemini-2.5-flash",
 480      "name": "Google: Gemini 2.5 Flash",
 481      "cost_per_1m_in": 0.3,
 482      "cost_per_1m_out": 2.5,
 483      "cost_per_1m_in_cached": 0.3833,
 484      "cost_per_1m_out_cached": 0.075,
 485      "context_window": 1048576,
 486      "default_max_tokens": 32767,
 487      "can_reason": true,
 488      "has_reasoning_efforts": false,
 489      "supports_attachments": true
 490    },
 491    {
 492      "id": "google/gemini-2.5-pro",
 493      "name": "Google: Gemini 2.5 Pro",
 494      "cost_per_1m_in": 1.25,
 495      "cost_per_1m_out": 10,
 496      "cost_per_1m_in_cached": 1.625,
 497      "cost_per_1m_out_cached": 0.31,
 498      "context_window": 1048576,
 499      "default_max_tokens": 32768,
 500      "can_reason": true,
 501      "has_reasoning_efforts": false,
 502      "supports_attachments": true
 503    },
 504    {
 505      "id": "openai/o3-pro",
 506      "name": "OpenAI: o3 Pro",
 507      "cost_per_1m_in": 20,
 508      "cost_per_1m_out": 80,
 509      "cost_per_1m_in_cached": 0,
 510      "cost_per_1m_out_cached": 0,
 511      "context_window": 200000,
 512      "default_max_tokens": 50000,
 513      "can_reason": true,
 514      "has_reasoning_efforts": false,
 515      "supports_attachments": true
 516    },
 517    {
 518      "id": "x-ai/grok-3-mini",
 519      "name": "xAI: Grok 3 Mini",
 520      "cost_per_1m_in": 0.6,
 521      "cost_per_1m_out": 4,
 522      "cost_per_1m_in_cached": 0,
 523      "cost_per_1m_out_cached": 0.15,
 524      "context_window": 131072,
 525      "default_max_tokens": 13107,
 526      "can_reason": true,
 527      "has_reasoning_efforts": false,
 528      "supports_attachments": false
 529    },
 530    {
 531      "id": "x-ai/grok-3",
 532      "name": "xAI: Grok 3",
 533      "cost_per_1m_in": 3,
 534      "cost_per_1m_out": 15,
 535      "cost_per_1m_in_cached": 0,
 536      "cost_per_1m_out_cached": 0.75,
 537      "context_window": 131072,
 538      "default_max_tokens": 13107,
 539      "can_reason": false,
 540      "has_reasoning_efforts": false,
 541      "supports_attachments": false
 542    },
 543    {
 544      "id": "mistralai/magistral-small-2506",
 545      "name": "Mistral: Magistral Small 2506",
 546      "cost_per_1m_in": 0.5,
 547      "cost_per_1m_out": 1.5,
 548      "cost_per_1m_in_cached": 0,
 549      "cost_per_1m_out_cached": 0,
 550      "context_window": 40000,
 551      "default_max_tokens": 20000,
 552      "can_reason": true,
 553      "has_reasoning_efforts": false,
 554      "supports_attachments": false
 555    },
 556    {
 557      "id": "mistralai/magistral-medium-2506",
 558      "name": "Mistral: Magistral Medium 2506",
 559      "cost_per_1m_in": 2,
 560      "cost_per_1m_out": 5,
 561      "cost_per_1m_in_cached": 0,
 562      "cost_per_1m_out_cached": 0,
 563      "context_window": 40960,
 564      "default_max_tokens": 20000,
 565      "can_reason": true,
 566      "has_reasoning_efforts": false,
 567      "supports_attachments": false
 568    },
 569    {
 570      "id": "mistralai/magistral-medium-2506:thinking",
 571      "name": "Mistral: Magistral Medium 2506 (thinking)",
 572      "cost_per_1m_in": 2,
 573      "cost_per_1m_out": 5,
 574      "cost_per_1m_in_cached": 0,
 575      "cost_per_1m_out_cached": 0,
 576      "context_window": 40960,
 577      "default_max_tokens": 20000,
 578      "can_reason": true,
 579      "has_reasoning_efforts": false,
 580      "supports_attachments": false
 581    },
 582    {
 583      "id": "google/gemini-2.5-pro-preview",
 584      "name": "Google: Gemini 2.5 Pro Preview 06-05",
 585      "cost_per_1m_in": 1.25,
 586      "cost_per_1m_out": 10,
 587      "cost_per_1m_in_cached": 1.625,
 588      "cost_per_1m_out_cached": 0.31,
 589      "context_window": 1048576,
 590      "default_max_tokens": 32768,
 591      "can_reason": true,
 592      "has_reasoning_efforts": false,
 593      "supports_attachments": true
 594    },
 595    {
 596      "id": "deepseek/deepseek-r1-0528",
 597      "name": "DeepSeek: R1 0528",
 598      "cost_per_1m_in": 2.55,
 599      "cost_per_1m_out": 5.95,
 600      "cost_per_1m_in_cached": 0,
 601      "cost_per_1m_out_cached": 0,
 602      "context_window": 163840,
 603      "default_max_tokens": 65536,
 604      "can_reason": true,
 605      "has_reasoning_efforts": false,
 606      "supports_attachments": false
 607    },
 608    {
 609      "id": "anthropic/claude-opus-4",
 610      "name": "Anthropic: Claude Opus 4",
 611      "cost_per_1m_in": 15,
 612      "cost_per_1m_out": 75,
 613      "cost_per_1m_in_cached": 18.75,
 614      "cost_per_1m_out_cached": 1.5,
 615      "context_window": 200000,
 616      "default_max_tokens": 16000,
 617      "can_reason": true,
 618      "has_reasoning_efforts": false,
 619      "supports_attachments": true
 620    },
 621    {
 622      "id": "anthropic/claude-sonnet-4",
 623      "name": "Anthropic: Claude Sonnet 4",
 624      "cost_per_1m_in": 3,
 625      "cost_per_1m_out": 15,
 626      "cost_per_1m_in_cached": 3.75,
 627      "cost_per_1m_out_cached": 0.3,
 628      "context_window": 1000000,
 629      "default_max_tokens": 32000,
 630      "can_reason": true,
 631      "has_reasoning_efforts": false,
 632      "supports_attachments": true
 633    },
 634    {
 635      "id": "mistralai/devstral-small-2505:free",
 636      "name": "Mistral: Devstral Small 2505 (free)",
 637      "cost_per_1m_in": 0,
 638      "cost_per_1m_out": 0,
 639      "cost_per_1m_in_cached": 0,
 640      "cost_per_1m_out_cached": 0,
 641      "context_window": 32768,
 642      "default_max_tokens": 3276,
 643      "can_reason": false,
 644      "has_reasoning_efforts": false,
 645      "supports_attachments": false
 646    },
 647    {
 648      "id": "mistralai/devstral-small-2505",
 649      "name": "Mistral: Devstral Small 2505",
 650      "cost_per_1m_in": 0.01999188,
 651      "cost_per_1m_out": 0.0800064,
 652      "cost_per_1m_in_cached": 0,
 653      "cost_per_1m_out_cached": 0,
 654      "context_window": 131072,
 655      "default_max_tokens": 13107,
 656      "can_reason": false,
 657      "has_reasoning_efforts": false,
 658      "supports_attachments": false
 659    },
 660    {
 661      "id": "openai/codex-mini",
 662      "name": "OpenAI: Codex Mini",
 663      "cost_per_1m_in": 1.5,
 664      "cost_per_1m_out": 6,
 665      "cost_per_1m_in_cached": 0,
 666      "cost_per_1m_out_cached": 0.375,
 667      "context_window": 200000,
 668      "default_max_tokens": 50000,
 669      "can_reason": true,
 670      "has_reasoning_efforts": false,
 671      "supports_attachments": true
 672    },
 673    {
 674      "id": "meta-llama/llama-3.3-8b-instruct:free",
 675      "name": "Meta: Llama 3.3 8B Instruct (free)",
 676      "cost_per_1m_in": 0,
 677      "cost_per_1m_out": 0,
 678      "cost_per_1m_in_cached": 0,
 679      "cost_per_1m_out_cached": 0,
 680      "context_window": 128000,
 681      "default_max_tokens": 2014,
 682      "can_reason": false,
 683      "has_reasoning_efforts": false,
 684      "supports_attachments": false
 685    },
 686    {
 687      "id": "mistralai/mistral-medium-3",
 688      "name": "Mistral: Mistral Medium 3",
 689      "cost_per_1m_in": 0.4,
 690      "cost_per_1m_out": 2,
 691      "cost_per_1m_in_cached": 0,
 692      "cost_per_1m_out_cached": 0,
 693      "context_window": 131072,
 694      "default_max_tokens": 13107,
 695      "can_reason": false,
 696      "has_reasoning_efforts": false,
 697      "supports_attachments": true
 698    },
 699    {
 700      "id": "google/gemini-2.5-pro-preview-05-06",
 701      "name": "Google: Gemini 2.5 Pro Preview 05-06",
 702      "cost_per_1m_in": 1.25,
 703      "cost_per_1m_out": 10,
 704      "cost_per_1m_in_cached": 1.625,
 705      "cost_per_1m_out_cached": 0.31,
 706      "context_window": 1048576,
 707      "default_max_tokens": 32768,
 708      "can_reason": true,
 709      "has_reasoning_efforts": false,
 710      "supports_attachments": true
 711    },
 712    {
 713      "id": "arcee-ai/virtuoso-large",
 714      "name": "Arcee AI: Virtuoso Large",
 715      "cost_per_1m_in": 0.75,
 716      "cost_per_1m_out": 1.2,
 717      "cost_per_1m_in_cached": 0,
 718      "cost_per_1m_out_cached": 0,
 719      "context_window": 131072,
 720      "default_max_tokens": 32000,
 721      "can_reason": false,
 722      "has_reasoning_efforts": false,
 723      "supports_attachments": false
 724    },
 725    {
 726      "id": "inception/mercury-coder",
 727      "name": "Inception: Mercury Coder",
 728      "cost_per_1m_in": 0.25,
 729      "cost_per_1m_out": 1,
 730      "cost_per_1m_in_cached": 0,
 731      "cost_per_1m_out_cached": 0,
 732      "context_window": 128000,
 733      "default_max_tokens": 8192,
 734      "can_reason": false,
 735      "has_reasoning_efforts": false,
 736      "supports_attachments": false
 737    },
 738    {
 739      "id": "qwen/qwen3-4b:free",
 740      "name": "Qwen: Qwen3 4B (free)",
 741      "cost_per_1m_in": 0,
 742      "cost_per_1m_out": 0,
 743      "cost_per_1m_in_cached": 0,
 744      "cost_per_1m_out_cached": 0,
 745      "context_window": 40960,
 746      "default_max_tokens": 4096,
 747      "can_reason": true,
 748      "has_reasoning_efforts": false,
 749      "supports_attachments": false
 750    },
 751    {
 752      "id": "qwen/qwen3-30b-a3b",
 753      "name": "Qwen: Qwen3 30B A3B",
 754      "cost_per_1m_in": 0.15,
 755      "cost_per_1m_out": 0.6,
 756      "cost_per_1m_in_cached": 0,
 757      "cost_per_1m_out_cached": 0,
 758      "context_window": 131072,
 759      "default_max_tokens": 4000,
 760      "can_reason": true,
 761      "has_reasoning_efforts": false,
 762      "supports_attachments": false
 763    },
 764    {
 765      "id": "qwen/qwen3-14b",
 766      "name": "Qwen: Qwen3 14B",
 767      "cost_per_1m_in": 0.06,
 768      "cost_per_1m_out": 0.24,
 769      "cost_per_1m_in_cached": 0,
 770      "cost_per_1m_out_cached": 0,
 771      "context_window": 40960,
 772      "default_max_tokens": 20480,
 773      "can_reason": true,
 774      "has_reasoning_efforts": false,
 775      "supports_attachments": false
 776    },
 777    {
 778      "id": "qwen/qwen3-32b",
 779      "name": "Qwen: Qwen3 32B",
 780      "cost_per_1m_in": 0.15,
 781      "cost_per_1m_out": 0.5,
 782      "cost_per_1m_in_cached": 0,
 783      "cost_per_1m_out_cached": 0,
 784      "context_window": 131072,
 785      "default_max_tokens": 4000,
 786      "can_reason": true,
 787      "has_reasoning_efforts": false,
 788      "supports_attachments": false
 789    },
 790    {
 791      "id": "qwen/qwen3-235b-a22b:free",
 792      "name": "Qwen: Qwen3 235B A22B (free)",
 793      "cost_per_1m_in": 0,
 794      "cost_per_1m_out": 0,
 795      "cost_per_1m_in_cached": 0,
 796      "cost_per_1m_out_cached": 0,
 797      "context_window": 131072,
 798      "default_max_tokens": 13107,
 799      "can_reason": false,
 800      "has_reasoning_efforts": false,
 801      "supports_attachments": false
 802    },
 803    {
 804      "id": "qwen/qwen3-235b-a22b",
 805      "name": "Qwen: Qwen3 235B A22B",
 806      "cost_per_1m_in": 0.22,
 807      "cost_per_1m_out": 0.88,
 808      "cost_per_1m_in_cached": 0,
 809      "cost_per_1m_out_cached": 0,
 810      "context_window": 131072,
 811      "default_max_tokens": 13107,
 812      "can_reason": true,
 813      "has_reasoning_efforts": false,
 814      "supports_attachments": false
 815    },
 816    {
 817      "id": "openai/o4-mini-high",
 818      "name": "OpenAI: o4 Mini High",
 819      "cost_per_1m_in": 1.1,
 820      "cost_per_1m_out": 4.4,
 821      "cost_per_1m_in_cached": 0,
 822      "cost_per_1m_out_cached": 0.275,
 823      "context_window": 200000,
 824      "default_max_tokens": 50000,
 825      "can_reason": true,
 826      "has_reasoning_efforts": false,
 827      "supports_attachments": true
 828    },
 829    {
 830      "id": "openai/o3",
 831      "name": "OpenAI: o3",
 832      "cost_per_1m_in": 2,
 833      "cost_per_1m_out": 8,
 834      "cost_per_1m_in_cached": 0,
 835      "cost_per_1m_out_cached": 0.5,
 836      "context_window": 200000,
 837      "default_max_tokens": 50000,
 838      "can_reason": true,
 839      "has_reasoning_efforts": false,
 840      "supports_attachments": true
 841    },
 842    {
 843      "id": "openai/o4-mini",
 844      "name": "OpenAI: o4 Mini",
 845      "cost_per_1m_in": 1.1,
 846      "cost_per_1m_out": 4.4,
 847      "cost_per_1m_in_cached": 0,
 848      "cost_per_1m_out_cached": 0.275,
 849      "context_window": 200000,
 850      "default_max_tokens": 50000,
 851      "can_reason": true,
 852      "has_reasoning_efforts": false,
 853      "supports_attachments": true
 854    },
 855    {
 856      "id": "openai/gpt-4.1",
 857      "name": "OpenAI: GPT-4.1",
 858      "cost_per_1m_in": 2,
 859      "cost_per_1m_out": 8,
 860      "cost_per_1m_in_cached": 0,
 861      "cost_per_1m_out_cached": 0.5,
 862      "context_window": 1047576,
 863      "default_max_tokens": 16384,
 864      "can_reason": false,
 865      "has_reasoning_efforts": false,
 866      "supports_attachments": true
 867    },
 868    {
 869      "id": "openai/gpt-4.1-mini",
 870      "name": "OpenAI: GPT-4.1 Mini",
 871      "cost_per_1m_in": 0.4,
 872      "cost_per_1m_out": 1.6,
 873      "cost_per_1m_in_cached": 0,
 874      "cost_per_1m_out_cached": 0.1,
 875      "context_window": 1047576,
 876      "default_max_tokens": 16384,
 877      "can_reason": false,
 878      "has_reasoning_efforts": false,
 879      "supports_attachments": true
 880    },
 881    {
 882      "id": "openai/gpt-4.1-nano",
 883      "name": "OpenAI: GPT-4.1 Nano",
 884      "cost_per_1m_in": 0.1,
 885      "cost_per_1m_out": 0.4,
 886      "cost_per_1m_in_cached": 0,
 887      "cost_per_1m_out_cached": 0.025,
 888      "context_window": 1047576,
 889      "default_max_tokens": 16384,
 890      "can_reason": false,
 891      "has_reasoning_efforts": false,
 892      "supports_attachments": true
 893    },
 894    {
 895      "id": "x-ai/grok-3-mini-beta",
 896      "name": "xAI: Grok 3 Mini Beta",
 897      "cost_per_1m_in": 0.6,
 898      "cost_per_1m_out": 4,
 899      "cost_per_1m_in_cached": 0,
 900      "cost_per_1m_out_cached": 0.15,
 901      "context_window": 131072,
 902      "default_max_tokens": 13107,
 903      "can_reason": true,
 904      "has_reasoning_efforts": false,
 905      "supports_attachments": false
 906    },
 907    {
 908      "id": "x-ai/grok-3-beta",
 909      "name": "xAI: Grok 3 Beta",
 910      "cost_per_1m_in": 3,
 911      "cost_per_1m_out": 15,
 912      "cost_per_1m_in_cached": 0,
 913      "cost_per_1m_out_cached": 0.75,
 914      "context_window": 131072,
 915      "default_max_tokens": 13107,
 916      "can_reason": false,
 917      "has_reasoning_efforts": false,
 918      "supports_attachments": false
 919    },
 920    {
 921      "id": "meta-llama/llama-4-maverick:free",
 922      "name": "Meta: Llama 4 Maverick (free)",
 923      "cost_per_1m_in": 0,
 924      "cost_per_1m_out": 0,
 925      "cost_per_1m_in_cached": 0,
 926      "cost_per_1m_out_cached": 0,
 927      "context_window": 128000,
 928      "default_max_tokens": 2014,
 929      "can_reason": false,
 930      "has_reasoning_efforts": false,
 931      "supports_attachments": true
 932    },
 933    {
 934      "id": "meta-llama/llama-4-maverick",
 935      "name": "Meta: Llama 4 Maverick",
 936      "cost_per_1m_in": 0.22,
 937      "cost_per_1m_out": 0.88,
 938      "cost_per_1m_in_cached": 0,
 939      "cost_per_1m_out_cached": 0,
 940      "context_window": 1048576,
 941      "default_max_tokens": 104857,
 942      "can_reason": false,
 943      "has_reasoning_efforts": false,
 944      "supports_attachments": true
 945    },
 946    {
 947      "id": "meta-llama/llama-4-scout:free",
 948      "name": "Meta: Llama 4 Scout (free)",
 949      "cost_per_1m_in": 0,
 950      "cost_per_1m_out": 0,
 951      "cost_per_1m_in_cached": 0,
 952      "cost_per_1m_out_cached": 0,
 953      "context_window": 128000,
 954      "default_max_tokens": 2014,
 955      "can_reason": false,
 956      "has_reasoning_efforts": false,
 957      "supports_attachments": true
 958    },
 959    {
 960      "id": "meta-llama/llama-4-scout",
 961      "name": "Meta: Llama 4 Scout",
 962      "cost_per_1m_in": 0.15,
 963      "cost_per_1m_out": 0.6,
 964      "cost_per_1m_in_cached": 0,
 965      "cost_per_1m_out_cached": 0,
 966      "context_window": 1048576,
 967      "default_max_tokens": 104857,
 968      "can_reason": false,
 969      "has_reasoning_efforts": false,
 970      "supports_attachments": true
 971    },
 972    {
 973      "id": "google/gemini-2.5-pro-exp-03-25",
 974      "name": "Google: Gemini 2.5 Pro Experimental",
 975      "cost_per_1m_in": 0,
 976      "cost_per_1m_out": 0,
 977      "cost_per_1m_in_cached": 0,
 978      "cost_per_1m_out_cached": 0,
 979      "context_window": 1048576,
 980      "default_max_tokens": 32767,
 981      "can_reason": false,
 982      "has_reasoning_efforts": false,
 983      "supports_attachments": true
 984    },
 985    {
 986      "id": "deepseek/deepseek-chat-v3-0324:free",
 987      "name": "DeepSeek: DeepSeek V3 0324 (free)",
 988      "cost_per_1m_in": 0,
 989      "cost_per_1m_out": 0,
 990      "cost_per_1m_in_cached": 0,
 991      "cost_per_1m_out_cached": 0,
 992      "context_window": 163840,
 993      "default_max_tokens": 16384,
 994      "can_reason": false,
 995      "has_reasoning_efforts": false,
 996      "supports_attachments": false
 997    },
 998    {
 999      "id": "deepseek/deepseek-chat-v3-0324",
1000      "name": "DeepSeek: DeepSeek V3 0324",
1001      "cost_per_1m_in": 0.8999999999999999,
1002      "cost_per_1m_out": 0.8999999999999999,
1003      "cost_per_1m_in_cached": 0,
1004      "cost_per_1m_out_cached": 0,
1005      "context_window": 163840,
1006      "default_max_tokens": 16384,
1007      "can_reason": false,
1008      "has_reasoning_efforts": false,
1009      "supports_attachments": false
1010    },
1011    {
1012      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
1013      "name": "Mistral: Mistral Small 3.1 24B (free)",
1014      "cost_per_1m_in": 0,
1015      "cost_per_1m_out": 0,
1016      "cost_per_1m_in_cached": 0,
1017      "cost_per_1m_out_cached": 0,
1018      "context_window": 128000,
1019      "default_max_tokens": 12800,
1020      "can_reason": false,
1021      "has_reasoning_efforts": false,
1022      "supports_attachments": true
1023    },
1024    {
1025      "id": "mistralai/mistral-small-3.1-24b-instruct",
1026      "name": "Mistral: Mistral Small 3.1 24B",
1027      "cost_per_1m_in": 0.09999999999999999,
1028      "cost_per_1m_out": 0.3,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0,
1031      "context_window": 131072,
1032      "default_max_tokens": 13107,
1033      "can_reason": false,
1034      "has_reasoning_efforts": false,
1035      "supports_attachments": true
1036    },
1037    {
1038      "id": "google/gemini-2.0-flash-lite-001",
1039      "name": "Google: Gemini 2.0 Flash Lite",
1040      "cost_per_1m_in": 0.075,
1041      "cost_per_1m_out": 0.3,
1042      "cost_per_1m_in_cached": 0,
1043      "cost_per_1m_out_cached": 0,
1044      "context_window": 1048576,
1045      "default_max_tokens": 4096,
1046      "can_reason": false,
1047      "has_reasoning_efforts": false,
1048      "supports_attachments": true
1049    },
1050    {
1051      "id": "anthropic/claude-3.7-sonnet",
1052      "name": "Anthropic: Claude 3.7 Sonnet",
1053      "cost_per_1m_in": 3,
1054      "cost_per_1m_out": 15,
1055      "cost_per_1m_in_cached": 3.75,
1056      "cost_per_1m_out_cached": 0.3,
1057      "context_window": 200000,
1058      "default_max_tokens": 64000,
1059      "can_reason": true,
1060      "has_reasoning_efforts": false,
1061      "supports_attachments": true
1062    },
1063    {
1064      "id": "anthropic/claude-3.7-sonnet:thinking",
1065      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
1066      "cost_per_1m_in": 3,
1067      "cost_per_1m_out": 15,
1068      "cost_per_1m_in_cached": 3.75,
1069      "cost_per_1m_out_cached": 0.3,
1070      "context_window": 200000,
1071      "default_max_tokens": 32000,
1072      "can_reason": true,
1073      "has_reasoning_efforts": false,
1074      "supports_attachments": true
1075    },
1076    {
1077      "id": "mistralai/mistral-saba",
1078      "name": "Mistral: Saba",
1079      "cost_per_1m_in": 0.19999999999999998,
1080      "cost_per_1m_out": 0.6,
1081      "cost_per_1m_in_cached": 0,
1082      "cost_per_1m_out_cached": 0,
1083      "context_window": 32768,
1084      "default_max_tokens": 3276,
1085      "can_reason": false,
1086      "has_reasoning_efforts": false,
1087      "supports_attachments": false
1088    },
1089    {
1090      "id": "openai/o3-mini-high",
1091      "name": "OpenAI: o3 Mini High",
1092      "cost_per_1m_in": 1.1,
1093      "cost_per_1m_out": 4.4,
1094      "cost_per_1m_in_cached": 0,
1095      "cost_per_1m_out_cached": 0.55,
1096      "context_window": 200000,
1097      "default_max_tokens": 50000,
1098      "can_reason": false,
1099      "has_reasoning_efforts": false,
1100      "supports_attachments": false
1101    },
1102    {
1103      "id": "google/gemini-2.0-flash-001",
1104      "name": "Google: Gemini 2.0 Flash",
1105      "cost_per_1m_in": 0.09999999999999999,
1106      "cost_per_1m_out": 0.39999999999999997,
1107      "cost_per_1m_in_cached": 0.18330000000000002,
1108      "cost_per_1m_out_cached": 0.024999999999999998,
1109      "context_window": 1048576,
1110      "default_max_tokens": 4096,
1111      "can_reason": false,
1112      "has_reasoning_efforts": false,
1113      "supports_attachments": true
1114    },
1115    {
1116      "id": "qwen/qwen-turbo",
1117      "name": "Qwen: Qwen-Turbo",
1118      "cost_per_1m_in": 0.049999999999999996,
1119      "cost_per_1m_out": 0.19999999999999998,
1120      "cost_per_1m_in_cached": 0,
1121      "cost_per_1m_out_cached": 0.02,
1122      "context_window": 1000000,
1123      "default_max_tokens": 4096,
1124      "can_reason": false,
1125      "has_reasoning_efforts": false,
1126      "supports_attachments": false
1127    },
1128    {
1129      "id": "qwen/qwen-plus",
1130      "name": "Qwen: Qwen-Plus",
1131      "cost_per_1m_in": 0.39999999999999997,
1132      "cost_per_1m_out": 1.2,
1133      "cost_per_1m_in_cached": 0,
1134      "cost_per_1m_out_cached": 0.16,
1135      "context_window": 131072,
1136      "default_max_tokens": 4096,
1137      "can_reason": false,
1138      "has_reasoning_efforts": false,
1139      "supports_attachments": false
1140    },
1141    {
1142      "id": "qwen/qwen-max",
1143      "name": "Qwen: Qwen-Max ",
1144      "cost_per_1m_in": 1.5999999999999999,
1145      "cost_per_1m_out": 6.3999999999999995,
1146      "cost_per_1m_in_cached": 0,
1147      "cost_per_1m_out_cached": 0.64,
1148      "context_window": 32768,
1149      "default_max_tokens": 4096,
1150      "can_reason": false,
1151      "has_reasoning_efforts": false,
1152      "supports_attachments": false
1153    },
1154    {
1155      "id": "openai/o3-mini",
1156      "name": "OpenAI: o3 Mini",
1157      "cost_per_1m_in": 1.1,
1158      "cost_per_1m_out": 4.4,
1159      "cost_per_1m_in_cached": 0,
1160      "cost_per_1m_out_cached": 0.55,
1161      "context_window": 200000,
1162      "default_max_tokens": 50000,
1163      "can_reason": false,
1164      "has_reasoning_efforts": false,
1165      "supports_attachments": false
1166    },
1167    {
1168      "id": "mistralai/mistral-small-24b-instruct-2501",
1169      "name": "Mistral: Mistral Small 3",
1170      "cost_per_1m_in": 0.09999999999999999,
1171      "cost_per_1m_out": 0.3,
1172      "cost_per_1m_in_cached": 0,
1173      "cost_per_1m_out_cached": 0,
1174      "context_window": 32768,
1175      "default_max_tokens": 3276,
1176      "can_reason": false,
1177      "has_reasoning_efforts": false,
1178      "supports_attachments": false
1179    },
1180    {
1181      "id": "deepseek/deepseek-r1",
1182      "name": "DeepSeek: R1",
1183      "cost_per_1m_in": 0.7,
1184      "cost_per_1m_out": 2.4,
1185      "cost_per_1m_in_cached": 0,
1186      "cost_per_1m_out_cached": 0,
1187      "context_window": 163840,
1188      "default_max_tokens": 81920,
1189      "can_reason": true,
1190      "has_reasoning_efforts": false,
1191      "supports_attachments": false
1192    },
1193    {
1194      "id": "mistralai/codestral-2501",
1195      "name": "Mistral: Codestral 2501",
1196      "cost_per_1m_in": 0.3,
1197      "cost_per_1m_out": 0.8999999999999999,
1198      "cost_per_1m_in_cached": 0,
1199      "cost_per_1m_out_cached": 0,
1200      "context_window": 262144,
1201      "default_max_tokens": 26214,
1202      "can_reason": false,
1203      "has_reasoning_efforts": false,
1204      "supports_attachments": false
1205    },
1206    {
1207      "id": "deepseek/deepseek-chat",
1208      "name": "DeepSeek: DeepSeek V3",
1209      "cost_per_1m_in": 0.39999999999999997,
1210      "cost_per_1m_out": 1.3,
1211      "cost_per_1m_in_cached": 0,
1212      "cost_per_1m_out_cached": 0,
1213      "context_window": 64000,
1214      "default_max_tokens": 8000,
1215      "can_reason": false,
1216      "has_reasoning_efforts": false,
1217      "supports_attachments": false
1218    },
1219    {
1220      "id": "openai/o1",
1221      "name": "OpenAI: o1",
1222      "cost_per_1m_in": 15,
1223      "cost_per_1m_out": 60,
1224      "cost_per_1m_in_cached": 0,
1225      "cost_per_1m_out_cached": 7.5,
1226      "context_window": 200000,
1227      "default_max_tokens": 50000,
1228      "can_reason": false,
1229      "has_reasoning_efforts": false,
1230      "supports_attachments": true
1231    },
1232    {
1233      "id": "x-ai/grok-2-1212",
1234      "name": "xAI: Grok 2 1212",
1235      "cost_per_1m_in": 2,
1236      "cost_per_1m_out": 10,
1237      "cost_per_1m_in_cached": 0,
1238      "cost_per_1m_out_cached": 0,
1239      "context_window": 131072,
1240      "default_max_tokens": 13107,
1241      "can_reason": false,
1242      "has_reasoning_efforts": false,
1243      "supports_attachments": false
1244    },
1245    {
1246      "id": "google/gemini-2.0-flash-exp:free",
1247      "name": "Google: Gemini 2.0 Flash Experimental (free)",
1248      "cost_per_1m_in": 0,
1249      "cost_per_1m_out": 0,
1250      "cost_per_1m_in_cached": 0,
1251      "cost_per_1m_out_cached": 0,
1252      "context_window": 1048576,
1253      "default_max_tokens": 4096,
1254      "can_reason": false,
1255      "has_reasoning_efforts": false,
1256      "supports_attachments": true
1257    },
1258    {
1259      "id": "meta-llama/llama-3.3-70b-instruct:free",
1260      "name": "Meta: Llama 3.3 70B Instruct (free)",
1261      "cost_per_1m_in": 0,
1262      "cost_per_1m_out": 0,
1263      "cost_per_1m_in_cached": 0,
1264      "cost_per_1m_out_cached": 0,
1265      "context_window": 65536,
1266      "default_max_tokens": 6553,
1267      "can_reason": false,
1268      "has_reasoning_efforts": false,
1269      "supports_attachments": false
1270    },
1271    {
1272      "id": "meta-llama/llama-3.3-70b-instruct",
1273      "name": "Meta: Llama 3.3 70B Instruct",
1274      "cost_per_1m_in": 0.039,
1275      "cost_per_1m_out": 0.12,
1276      "cost_per_1m_in_cached": 0,
1277      "cost_per_1m_out_cached": 0,
1278      "context_window": 131072,
1279      "default_max_tokens": 4096,
1280      "can_reason": false,
1281      "has_reasoning_efforts": false,
1282      "supports_attachments": false
1283    },
1284    {
1285      "id": "amazon/nova-lite-v1",
1286      "name": "Amazon: Nova Lite 1.0",
1287      "cost_per_1m_in": 0.06,
1288      "cost_per_1m_out": 0.24,
1289      "cost_per_1m_in_cached": 0,
1290      "cost_per_1m_out_cached": 0,
1291      "context_window": 300000,
1292      "default_max_tokens": 2560,
1293      "can_reason": false,
1294      "has_reasoning_efforts": false,
1295      "supports_attachments": true
1296    },
1297    {
1298      "id": "amazon/nova-micro-v1",
1299      "name": "Amazon: Nova Micro 1.0",
1300      "cost_per_1m_in": 0.035,
1301      "cost_per_1m_out": 0.14,
1302      "cost_per_1m_in_cached": 0,
1303      "cost_per_1m_out_cached": 0,
1304      "context_window": 128000,
1305      "default_max_tokens": 2560,
1306      "can_reason": false,
1307      "has_reasoning_efforts": false,
1308      "supports_attachments": false
1309    },
1310    {
1311      "id": "amazon/nova-pro-v1",
1312      "name": "Amazon: Nova Pro 1.0",
1313      "cost_per_1m_in": 0.7999999999999999,
1314      "cost_per_1m_out": 3.1999999999999997,
1315      "cost_per_1m_in_cached": 0,
1316      "cost_per_1m_out_cached": 0,
1317      "context_window": 300000,
1318      "default_max_tokens": 2560,
1319      "can_reason": false,
1320      "has_reasoning_efforts": false,
1321      "supports_attachments": true
1322    },
1323    {
1324      "id": "openai/gpt-4o-2024-11-20",
1325      "name": "OpenAI: GPT-4o (2024-11-20)",
1326      "cost_per_1m_in": 2.5,
1327      "cost_per_1m_out": 10,
1328      "cost_per_1m_in_cached": 0,
1329      "cost_per_1m_out_cached": 1.25,
1330      "context_window": 128000,
1331      "default_max_tokens": 8192,
1332      "can_reason": false,
1333      "has_reasoning_efforts": false,
1334      "supports_attachments": true
1335    },
1336    {
1337      "id": "mistralai/mistral-large-2411",
1338      "name": "Mistral Large 2411",
1339      "cost_per_1m_in": 2,
1340      "cost_per_1m_out": 6,
1341      "cost_per_1m_in_cached": 0,
1342      "cost_per_1m_out_cached": 0,
1343      "context_window": 131072,
1344      "default_max_tokens": 13107,
1345      "can_reason": false,
1346      "has_reasoning_efforts": false,
1347      "supports_attachments": false
1348    },
1349    {
1350      "id": "mistralai/mistral-large-2407",
1351      "name": "Mistral Large 2407",
1352      "cost_per_1m_in": 2,
1353      "cost_per_1m_out": 6,
1354      "cost_per_1m_in_cached": 0,
1355      "cost_per_1m_out_cached": 0,
1356      "context_window": 131072,
1357      "default_max_tokens": 13107,
1358      "can_reason": false,
1359      "has_reasoning_efforts": false,
1360      "supports_attachments": false
1361    },
1362    {
1363      "id": "mistralai/pixtral-large-2411",
1364      "name": "Mistral: Pixtral Large 2411",
1365      "cost_per_1m_in": 2,
1366      "cost_per_1m_out": 6,
1367      "cost_per_1m_in_cached": 0,
1368      "cost_per_1m_out_cached": 0,
1369      "context_window": 131072,
1370      "default_max_tokens": 13107,
1371      "can_reason": false,
1372      "has_reasoning_efforts": false,
1373      "supports_attachments": true
1374    },
1375    {
1376      "id": "thedrummer/unslopnemo-12b",
1377      "name": "TheDrummer: UnslopNemo 12B",
1378      "cost_per_1m_in": 0.39999999999999997,
1379      "cost_per_1m_out": 0.39999999999999997,
1380      "cost_per_1m_in_cached": 0,
1381      "cost_per_1m_out_cached": 0,
1382      "context_window": 32000,
1383      "default_max_tokens": 16000,
1384      "can_reason": false,
1385      "has_reasoning_efforts": false,
1386      "supports_attachments": false
1387    },
1388    {
1389      "id": "anthropic/claude-3.5-haiku",
1390      "name": "Anthropic: Claude 3.5 Haiku",
1391      "cost_per_1m_in": 0.7999999999999999,
1392      "cost_per_1m_out": 4,
1393      "cost_per_1m_in_cached": 1,
1394      "cost_per_1m_out_cached": 0.08,
1395      "context_window": 200000,
1396      "default_max_tokens": 4096,
1397      "can_reason": false,
1398      "has_reasoning_efforts": false,
1399      "supports_attachments": true
1400    },
1401    {
1402      "id": "anthropic/claude-3.5-haiku-20241022",
1403      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
1404      "cost_per_1m_in": 0.7999999999999999,
1405      "cost_per_1m_out": 4,
1406      "cost_per_1m_in_cached": 1,
1407      "cost_per_1m_out_cached": 0.08,
1408      "context_window": 200000,
1409      "default_max_tokens": 4096,
1410      "can_reason": false,
1411      "has_reasoning_efforts": false,
1412      "supports_attachments": true
1413    },
1414    {
1415      "id": "anthropic/claude-3.5-sonnet",
1416      "name": "Anthropic: Claude 3.5 Sonnet",
1417      "cost_per_1m_in": 3,
1418      "cost_per_1m_out": 15,
1419      "cost_per_1m_in_cached": 3.75,
1420      "cost_per_1m_out_cached": 0.3,
1421      "context_window": 200000,
1422      "default_max_tokens": 4096,
1423      "can_reason": false,
1424      "has_reasoning_efforts": false,
1425      "supports_attachments": true
1426    },
1427    {
1428      "id": "mistralai/ministral-8b",
1429      "name": "Mistral: Ministral 8B",
1430      "cost_per_1m_in": 0.09999999999999999,
1431      "cost_per_1m_out": 0.09999999999999999,
1432      "cost_per_1m_in_cached": 0,
1433      "cost_per_1m_out_cached": 0,
1434      "context_window": 128000,
1435      "default_max_tokens": 12800,
1436      "can_reason": false,
1437      "has_reasoning_efforts": false,
1438      "supports_attachments": false
1439    },
1440    {
1441      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1442      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1443      "cost_per_1m_in": 0.12,
1444      "cost_per_1m_out": 0.3,
1445      "cost_per_1m_in_cached": 0,
1446      "cost_per_1m_out_cached": 0,
1447      "context_window": 131072,
1448      "default_max_tokens": 8192,
1449      "can_reason": false,
1450      "has_reasoning_efforts": false,
1451      "supports_attachments": false
1452    },
1453    {
1454      "id": "google/gemini-flash-1.5-8b",
1455      "name": "Google: Gemini 1.5 Flash 8B",
1456      "cost_per_1m_in": 0.0375,
1457      "cost_per_1m_out": 0.15,
1458      "cost_per_1m_in_cached": 0.0583,
1459      "cost_per_1m_out_cached": 0.01,
1460      "context_window": 1000000,
1461      "default_max_tokens": 4096,
1462      "can_reason": false,
1463      "has_reasoning_efforts": false,
1464      "supports_attachments": true
1465    },
1466    {
1467      "id": "thedrummer/rocinante-12b",
1468      "name": "TheDrummer: Rocinante 12B",
1469      "cost_per_1m_in": 0.16999999999999998,
1470      "cost_per_1m_out": 0.43,
1471      "cost_per_1m_in_cached": 0,
1472      "cost_per_1m_out_cached": 0,
1473      "context_window": 32768,
1474      "default_max_tokens": 3276,
1475      "can_reason": false,
1476      "has_reasoning_efforts": false,
1477      "supports_attachments": false
1478    },
1479    {
1480      "id": "meta-llama/llama-3.2-3b-instruct",
1481      "name": "Meta: Llama 3.2 3B Instruct",
1482      "cost_per_1m_in": 0.03,
1483      "cost_per_1m_out": 0.049999999999999996,
1484      "cost_per_1m_in_cached": 0,
1485      "cost_per_1m_out_cached": 0,
1486      "context_window": 32768,
1487      "default_max_tokens": 16000,
1488      "can_reason": false,
1489      "has_reasoning_efforts": false,
1490      "supports_attachments": false
1491    },
1492    {
1493      "id": "qwen/qwen-2.5-72b-instruct",
1494      "name": "Qwen2.5 72B Instruct",
1495      "cost_per_1m_in": 0.12,
1496      "cost_per_1m_out": 0.39,
1497      "cost_per_1m_in_cached": 0,
1498      "cost_per_1m_out_cached": 0,
1499      "context_window": 32768,
1500      "default_max_tokens": 8192,
1501      "can_reason": false,
1502      "has_reasoning_efforts": false,
1503      "supports_attachments": false
1504    },
1505    {
1506      "id": "cohere/command-r-plus-08-2024",
1507      "name": "Cohere: Command R+ (08-2024)",
1508      "cost_per_1m_in": 2.5,
1509      "cost_per_1m_out": 10,
1510      "cost_per_1m_in_cached": 0,
1511      "cost_per_1m_out_cached": 0,
1512      "context_window": 128000,
1513      "default_max_tokens": 2000,
1514      "can_reason": false,
1515      "has_reasoning_efforts": false,
1516      "supports_attachments": false
1517    },
1518    {
1519      "id": "cohere/command-r-08-2024",
1520      "name": "Cohere: Command R (08-2024)",
1521      "cost_per_1m_in": 0.15,
1522      "cost_per_1m_out": 0.6,
1523      "cost_per_1m_in_cached": 0,
1524      "cost_per_1m_out_cached": 0,
1525      "context_window": 128000,
1526      "default_max_tokens": 2000,
1527      "can_reason": false,
1528      "has_reasoning_efforts": false,
1529      "supports_attachments": false
1530    },
1531    {
1532      "id": "microsoft/phi-3.5-mini-128k-instruct",
1533      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
1534      "cost_per_1m_in": 0.09999999999999999,
1535      "cost_per_1m_out": 0.09999999999999999,
1536      "cost_per_1m_in_cached": 0,
1537      "cost_per_1m_out_cached": 0,
1538      "context_window": 128000,
1539      "default_max_tokens": 12800,
1540      "can_reason": false,
1541      "has_reasoning_efforts": false,
1542      "supports_attachments": false
1543    },
1544    {
1545      "id": "nousresearch/hermes-3-llama-3.1-70b",
1546      "name": "Nous: Hermes 3 70B Instruct",
1547      "cost_per_1m_in": 0.39999999999999997,
1548      "cost_per_1m_out": 0.39999999999999997,
1549      "cost_per_1m_in_cached": 0,
1550      "cost_per_1m_out_cached": 0,
1551      "context_window": 12288,
1552      "default_max_tokens": 1228,
1553      "can_reason": false,
1554      "has_reasoning_efforts": false,
1555      "supports_attachments": false
1556    },
1557    {
1558      "id": "openai/gpt-4o-2024-08-06",
1559      "name": "OpenAI: GPT-4o (2024-08-06)",
1560      "cost_per_1m_in": 2.5,
1561      "cost_per_1m_out": 10,
1562      "cost_per_1m_in_cached": 0,
1563      "cost_per_1m_out_cached": 1.25,
1564      "context_window": 128000,
1565      "default_max_tokens": 8192,
1566      "can_reason": false,
1567      "has_reasoning_efforts": false,
1568      "supports_attachments": true
1569    },
1570    {
1571      "id": "meta-llama/llama-3.1-8b-instruct",
1572      "name": "Meta: Llama 3.1 8B Instruct",
1573      "cost_per_1m_in": 0.03,
1574      "cost_per_1m_out": 0.049999999999999996,
1575      "cost_per_1m_in_cached": 0,
1576      "cost_per_1m_out_cached": 0,
1577      "context_window": 131072,
1578      "default_max_tokens": 8192,
1579      "can_reason": false,
1580      "has_reasoning_efforts": false,
1581      "supports_attachments": false
1582    },
1583    {
1584      "id": "meta-llama/llama-3.1-405b-instruct",
1585      "name": "Meta: Llama 3.1 405B Instruct",
1586      "cost_per_1m_in": 0.7999999999999999,
1587      "cost_per_1m_out": 0.7999999999999999,
1588      "cost_per_1m_in_cached": 0,
1589      "cost_per_1m_out_cached": 0,
1590      "context_window": 32768,
1591      "default_max_tokens": 8192,
1592      "can_reason": false,
1593      "has_reasoning_efforts": false,
1594      "supports_attachments": false
1595    },
1596    {
1597      "id": "meta-llama/llama-3.1-70b-instruct",
1598      "name": "Meta: Llama 3.1 70B Instruct",
1599      "cost_per_1m_in": 0.22999999999999998,
1600      "cost_per_1m_out": 0.39999999999999997,
1601      "cost_per_1m_in_cached": 0,
1602      "cost_per_1m_out_cached": 0,
1603      "context_window": 131072,
1604      "default_max_tokens": 13107,
1605      "can_reason": false,
1606      "has_reasoning_efforts": false,
1607      "supports_attachments": false
1608    },
1609    {
1610      "id": "mistralai/mistral-nemo",
1611      "name": "Mistral: Mistral Nemo",
1612      "cost_per_1m_in": 0.15,
1613      "cost_per_1m_out": 0.15,
1614      "cost_per_1m_in_cached": 0,
1615      "cost_per_1m_out_cached": 0,
1616      "context_window": 131072,
1617      "default_max_tokens": 13107,
1618      "can_reason": false,
1619      "has_reasoning_efforts": false,
1620      "supports_attachments": false
1621    },
1622    {
1623      "id": "openai/gpt-4o-mini",
1624      "name": "OpenAI: GPT-4o-mini",
1625      "cost_per_1m_in": 0.15,
1626      "cost_per_1m_out": 0.6,
1627      "cost_per_1m_in_cached": 0,
1628      "cost_per_1m_out_cached": 0.075,
1629      "context_window": 128000,
1630      "default_max_tokens": 8192,
1631      "can_reason": false,
1632      "has_reasoning_efforts": false,
1633      "supports_attachments": true
1634    },
1635    {
1636      "id": "openai/gpt-4o-mini-2024-07-18",
1637      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1638      "cost_per_1m_in": 0.15,
1639      "cost_per_1m_out": 0.6,
1640      "cost_per_1m_in_cached": 0,
1641      "cost_per_1m_out_cached": 0.075,
1642      "context_window": 128000,
1643      "default_max_tokens": 8192,
1644      "can_reason": false,
1645      "has_reasoning_efforts": false,
1646      "supports_attachments": true
1647    },
1648    {
1649      "id": "anthropic/claude-3.5-sonnet-20240620",
1650      "name": "Anthropic: Claude 3.5 Sonnet (2024-06-20)",
1651      "cost_per_1m_in": 3,
1652      "cost_per_1m_out": 15,
1653      "cost_per_1m_in_cached": 3.75,
1654      "cost_per_1m_out_cached": 0.3,
1655      "context_window": 200000,
1656      "default_max_tokens": 4096,
1657      "can_reason": false,
1658      "has_reasoning_efforts": false,
1659      "supports_attachments": true
1660    },
1661    {
1662      "id": "mistralai/mistral-7b-instruct:free",
1663      "name": "Mistral: Mistral 7B Instruct (free)",
1664      "cost_per_1m_in": 0,
1665      "cost_per_1m_out": 0,
1666      "cost_per_1m_in_cached": 0,
1667      "cost_per_1m_out_cached": 0,
1668      "context_window": 32768,
1669      "default_max_tokens": 8192,
1670      "can_reason": false,
1671      "has_reasoning_efforts": false,
1672      "supports_attachments": false
1673    },
1674    {
1675      "id": "mistralai/mistral-7b-instruct",
1676      "name": "Mistral: Mistral 7B Instruct",
1677      "cost_per_1m_in": 0.028,
1678      "cost_per_1m_out": 0.054,
1679      "cost_per_1m_in_cached": 0,
1680      "cost_per_1m_out_cached": 0,
1681      "context_window": 32768,
1682      "default_max_tokens": 8192,
1683      "can_reason": false,
1684      "has_reasoning_efforts": false,
1685      "supports_attachments": false
1686    },
1687    {
1688      "id": "mistralai/mistral-7b-instruct-v0.3",
1689      "name": "Mistral: Mistral 7B Instruct v0.3",
1690      "cost_per_1m_in": 0.028,
1691      "cost_per_1m_out": 0.054,
1692      "cost_per_1m_in_cached": 0,
1693      "cost_per_1m_out_cached": 0,
1694      "context_window": 32768,
1695      "default_max_tokens": 8192,
1696      "can_reason": false,
1697      "has_reasoning_efforts": false,
1698      "supports_attachments": false
1699    },
1700    {
1701      "id": "microsoft/phi-3-mini-128k-instruct",
1702      "name": "Microsoft: Phi-3 Mini 128K Instruct",
1703      "cost_per_1m_in": 0.09999999999999999,
1704      "cost_per_1m_out": 0.09999999999999999,
1705      "cost_per_1m_in_cached": 0,
1706      "cost_per_1m_out_cached": 0,
1707      "context_window": 128000,
1708      "default_max_tokens": 12800,
1709      "can_reason": false,
1710      "has_reasoning_efforts": false,
1711      "supports_attachments": false
1712    },
1713    {
1714      "id": "microsoft/phi-3-medium-128k-instruct",
1715      "name": "Microsoft: Phi-3 Medium 128K Instruct",
1716      "cost_per_1m_in": 1,
1717      "cost_per_1m_out": 1,
1718      "cost_per_1m_in_cached": 0,
1719      "cost_per_1m_out_cached": 0,
1720      "context_window": 128000,
1721      "default_max_tokens": 12800,
1722      "can_reason": false,
1723      "has_reasoning_efforts": false,
1724      "supports_attachments": false
1725    },
1726    {
1727      "id": "google/gemini-flash-1.5",
1728      "name": "Google: Gemini 1.5 Flash ",
1729      "cost_per_1m_in": 0.075,
1730      "cost_per_1m_out": 0.3,
1731      "cost_per_1m_in_cached": 0.1583,
1732      "cost_per_1m_out_cached": 0.01875,
1733      "context_window": 1000000,
1734      "default_max_tokens": 4096,
1735      "can_reason": false,
1736      "has_reasoning_efforts": false,
1737      "supports_attachments": true
1738    },
1739    {
1740      "id": "openai/gpt-4o",
1741      "name": "OpenAI: GPT-4o",
1742      "cost_per_1m_in": 2.5,
1743      "cost_per_1m_out": 10,
1744      "cost_per_1m_in_cached": 0,
1745      "cost_per_1m_out_cached": 0,
1746      "context_window": 128000,
1747      "default_max_tokens": 8192,
1748      "can_reason": false,
1749      "has_reasoning_efforts": false,
1750      "supports_attachments": true
1751    },
1752    {
1753      "id": "openai/gpt-4o:extended",
1754      "name": "OpenAI: GPT-4o (extended)",
1755      "cost_per_1m_in": 6,
1756      "cost_per_1m_out": 18,
1757      "cost_per_1m_in_cached": 0,
1758      "cost_per_1m_out_cached": 0,
1759      "context_window": 128000,
1760      "default_max_tokens": 32000,
1761      "can_reason": false,
1762      "has_reasoning_efforts": false,
1763      "supports_attachments": true
1764    },
1765    {
1766      "id": "openai/gpt-4o-2024-05-13",
1767      "name": "OpenAI: GPT-4o (2024-05-13)",
1768      "cost_per_1m_in": 5,
1769      "cost_per_1m_out": 15,
1770      "cost_per_1m_in_cached": 0,
1771      "cost_per_1m_out_cached": 0,
1772      "context_window": 128000,
1773      "default_max_tokens": 2048,
1774      "can_reason": false,
1775      "has_reasoning_efforts": false,
1776      "supports_attachments": true
1777    },
1778    {
1779      "id": "meta-llama/llama-3-8b-instruct",
1780      "name": "Meta: Llama 3 8B Instruct",
1781      "cost_per_1m_in": 0.03,
1782      "cost_per_1m_out": 0.06,
1783      "cost_per_1m_in_cached": 0,
1784      "cost_per_1m_out_cached": 0,
1785      "context_window": 8192,
1786      "default_max_tokens": 8192,
1787      "can_reason": false,
1788      "has_reasoning_efforts": false,
1789      "supports_attachments": false
1790    },
1791    {
1792      "id": "meta-llama/llama-3-70b-instruct",
1793      "name": "Meta: Llama 3 70B Instruct",
1794      "cost_per_1m_in": 0.3,
1795      "cost_per_1m_out": 0.39999999999999997,
1796      "cost_per_1m_in_cached": 0,
1797      "cost_per_1m_out_cached": 0,
1798      "context_window": 8192,
1799      "default_max_tokens": 8192,
1800      "can_reason": false,
1801      "has_reasoning_efforts": false,
1802      "supports_attachments": false
1803    },
1804    {
1805      "id": "mistralai/mixtral-8x22b-instruct",
1806      "name": "Mistral: Mixtral 8x22B Instruct",
1807      "cost_per_1m_in": 2,
1808      "cost_per_1m_out": 6,
1809      "cost_per_1m_in_cached": 0,
1810      "cost_per_1m_out_cached": 0,
1811      "context_window": 65536,
1812      "default_max_tokens": 6553,
1813      "can_reason": false,
1814      "has_reasoning_efforts": false,
1815      "supports_attachments": false
1816    },
1817    {
1818      "id": "google/gemini-pro-1.5",
1819      "name": "Google: Gemini 1.5 Pro",
1820      "cost_per_1m_in": 1.25,
1821      "cost_per_1m_out": 5,
1822      "cost_per_1m_in_cached": 0,
1823      "cost_per_1m_out_cached": 0,
1824      "context_window": 2000000,
1825      "default_max_tokens": 4096,
1826      "can_reason": false,
1827      "has_reasoning_efforts": false,
1828      "supports_attachments": true
1829    },
1830    {
1831      "id": "openai/gpt-4-turbo",
1832      "name": "OpenAI: GPT-4 Turbo",
1833      "cost_per_1m_in": 10,
1834      "cost_per_1m_out": 30,
1835      "cost_per_1m_in_cached": 0,
1836      "cost_per_1m_out_cached": 0,
1837      "context_window": 128000,
1838      "default_max_tokens": 2048,
1839      "can_reason": false,
1840      "has_reasoning_efforts": false,
1841      "supports_attachments": true
1842    },
1843    {
1844      "id": "cohere/command-r-plus",
1845      "name": "Cohere: Command R+",
1846      "cost_per_1m_in": 3,
1847      "cost_per_1m_out": 15,
1848      "cost_per_1m_in_cached": 0,
1849      "cost_per_1m_out_cached": 0,
1850      "context_window": 128000,
1851      "default_max_tokens": 2000,
1852      "can_reason": false,
1853      "has_reasoning_efforts": false,
1854      "supports_attachments": false
1855    },
1856    {
1857      "id": "cohere/command-r-plus-04-2024",
1858      "name": "Cohere: Command R+ (04-2024)",
1859      "cost_per_1m_in": 3,
1860      "cost_per_1m_out": 15,
1861      "cost_per_1m_in_cached": 0,
1862      "cost_per_1m_out_cached": 0,
1863      "context_window": 128000,
1864      "default_max_tokens": 2000,
1865      "can_reason": false,
1866      "has_reasoning_efforts": false,
1867      "supports_attachments": false
1868    },
1869    {
1870      "id": "cohere/command-r",
1871      "name": "Cohere: Command R",
1872      "cost_per_1m_in": 0.5,
1873      "cost_per_1m_out": 1.5,
1874      "cost_per_1m_in_cached": 0,
1875      "cost_per_1m_out_cached": 0,
1876      "context_window": 128000,
1877      "default_max_tokens": 2000,
1878      "can_reason": false,
1879      "has_reasoning_efforts": false,
1880      "supports_attachments": false
1881    },
1882    {
1883      "id": "anthropic/claude-3-haiku",
1884      "name": "Anthropic: Claude 3 Haiku",
1885      "cost_per_1m_in": 0.25,
1886      "cost_per_1m_out": 1.25,
1887      "cost_per_1m_in_cached": 0.3,
1888      "cost_per_1m_out_cached": 0.03,
1889      "context_window": 200000,
1890      "default_max_tokens": 2048,
1891      "can_reason": false,
1892      "has_reasoning_efforts": false,
1893      "supports_attachments": true
1894    },
1895    {
1896      "id": "anthropic/claude-3-opus",
1897      "name": "Anthropic: Claude 3 Opus",
1898      "cost_per_1m_in": 15,
1899      "cost_per_1m_out": 75,
1900      "cost_per_1m_in_cached": 18.75,
1901      "cost_per_1m_out_cached": 1.5,
1902      "context_window": 200000,
1903      "default_max_tokens": 2048,
1904      "can_reason": false,
1905      "has_reasoning_efforts": false,
1906      "supports_attachments": true
1907    },
1908    {
1909      "id": "cohere/command-r-03-2024",
1910      "name": "Cohere: Command R (03-2024)",
1911      "cost_per_1m_in": 0.5,
1912      "cost_per_1m_out": 1.5,
1913      "cost_per_1m_in_cached": 0,
1914      "cost_per_1m_out_cached": 0,
1915      "context_window": 128000,
1916      "default_max_tokens": 2000,
1917      "can_reason": false,
1918      "has_reasoning_efforts": false,
1919      "supports_attachments": false
1920    },
1921    {
1922      "id": "mistralai/mistral-large",
1923      "name": "Mistral Large",
1924      "cost_per_1m_in": 2,
1925      "cost_per_1m_out": 6,
1926      "cost_per_1m_in_cached": 0,
1927      "cost_per_1m_out_cached": 0,
1928      "context_window": 128000,
1929      "default_max_tokens": 12800,
1930      "can_reason": false,
1931      "has_reasoning_efforts": false,
1932      "supports_attachments": false
1933    },
1934    {
1935      "id": "openai/gpt-3.5-turbo-0613",
1936      "name": "OpenAI: GPT-3.5 Turbo (older v0613)",
1937      "cost_per_1m_in": 1,
1938      "cost_per_1m_out": 2,
1939      "cost_per_1m_in_cached": 0,
1940      "cost_per_1m_out_cached": 0,
1941      "context_window": 4095,
1942      "default_max_tokens": 2048,
1943      "can_reason": false,
1944      "has_reasoning_efforts": false,
1945      "supports_attachments": false
1946    },
1947    {
1948      "id": "openai/gpt-4-turbo-preview",
1949      "name": "OpenAI: GPT-4 Turbo Preview",
1950      "cost_per_1m_in": 10,
1951      "cost_per_1m_out": 30,
1952      "cost_per_1m_in_cached": 0,
1953      "cost_per_1m_out_cached": 0,
1954      "context_window": 128000,
1955      "default_max_tokens": 2048,
1956      "can_reason": false,
1957      "has_reasoning_efforts": false,
1958      "supports_attachments": false
1959    },
1960    {
1961      "id": "mistralai/mistral-small",
1962      "name": "Mistral Small",
1963      "cost_per_1m_in": 0.2,
1964      "cost_per_1m_out": 0.6,
1965      "cost_per_1m_in_cached": 0,
1966      "cost_per_1m_out_cached": 0,
1967      "context_window": 32768,
1968      "default_max_tokens": 3276,
1969      "can_reason": false,
1970      "has_reasoning_efforts": false,
1971      "supports_attachments": false
1972    },
1973    {
1974      "id": "mistralai/mistral-tiny",
1975      "name": "Mistral Tiny",
1976      "cost_per_1m_in": 0.25,
1977      "cost_per_1m_out": 0.25,
1978      "cost_per_1m_in_cached": 0,
1979      "cost_per_1m_out_cached": 0,
1980      "context_window": 32768,
1981      "default_max_tokens": 3276,
1982      "can_reason": false,
1983      "has_reasoning_efforts": false,
1984      "supports_attachments": false
1985    },
1986    {
1987      "id": "mistralai/mixtral-8x7b-instruct",
1988      "name": "Mistral: Mixtral 8x7B Instruct",
1989      "cost_per_1m_in": 0.08,
1990      "cost_per_1m_out": 0.24,
1991      "cost_per_1m_in_cached": 0,
1992      "cost_per_1m_out_cached": 0,
1993      "context_window": 32768,
1994      "default_max_tokens": 8192,
1995      "can_reason": false,
1996      "has_reasoning_efforts": false,
1997      "supports_attachments": false
1998    },
1999    {
2000      "id": "openai/gpt-4-1106-preview",
2001      "name": "OpenAI: GPT-4 Turbo (older v1106)",
2002      "cost_per_1m_in": 10,
2003      "cost_per_1m_out": 30,
2004      "cost_per_1m_in_cached": 0,
2005      "cost_per_1m_out_cached": 0,
2006      "context_window": 128000,
2007      "default_max_tokens": 2048,
2008      "can_reason": false,
2009      "has_reasoning_efforts": false,
2010      "supports_attachments": false
2011    },
2012    {
2013      "id": "openai/gpt-3.5-turbo-16k",
2014      "name": "OpenAI: GPT-3.5 Turbo 16k",
2015      "cost_per_1m_in": 3,
2016      "cost_per_1m_out": 4,
2017      "cost_per_1m_in_cached": 0,
2018      "cost_per_1m_out_cached": 0,
2019      "context_window": 16385,
2020      "default_max_tokens": 2048,
2021      "can_reason": false,
2022      "has_reasoning_efforts": false,
2023      "supports_attachments": false
2024    },
2025    {
2026      "id": "openai/gpt-3.5-turbo",
2027      "name": "OpenAI: GPT-3.5 Turbo",
2028      "cost_per_1m_in": 0.5,
2029      "cost_per_1m_out": 1.5,
2030      "cost_per_1m_in_cached": 0,
2031      "cost_per_1m_out_cached": 0,
2032      "context_window": 16385,
2033      "default_max_tokens": 2048,
2034      "can_reason": false,
2035      "has_reasoning_efforts": false,
2036      "supports_attachments": false
2037    },
2038    {
2039      "id": "openai/gpt-4",
2040      "name": "OpenAI: GPT-4",
2041      "cost_per_1m_in": 30,
2042      "cost_per_1m_out": 60,
2043      "cost_per_1m_in_cached": 0,
2044      "cost_per_1m_out_cached": 0,
2045      "context_window": 8191,
2046      "default_max_tokens": 2048,
2047      "can_reason": false,
2048      "has_reasoning_efforts": false,
2049      "supports_attachments": false
2050    },
2051    {
2052      "id": "openai/gpt-4-0314",
2053      "name": "OpenAI: GPT-4 (older v0314)",
2054      "cost_per_1m_in": 30,
2055      "cost_per_1m_out": 60,
2056      "cost_per_1m_in_cached": 0,
2057      "cost_per_1m_out_cached": 0,
2058      "context_window": 8191,
2059      "default_max_tokens": 2048,
2060      "can_reason": false,
2061      "has_reasoning_efforts": false,
2062      "supports_attachments": false
2063    }
2064  ],
2065  "default_headers": {
2066    "HTTP-Referer": "https://charm.land",
2067    "X-Title": "Crush"
2068  }
2069}