openrouter.json

   1{
   2  "name": "OpenRouter",
   3  "id": "openrouter",
   4  "api_key": "$OPENROUTER_API_KEY",
   5  "api_endpoint": "https://openrouter.ai/api/v1",
   6  "type": "openai",
   7  "default_large_model_id": "anthropic/claude-sonnet-4",
   8  "default_small_model_id": "anthropic/claude-3.5-haiku",
   9  "models": [
  10    {
  11      "id": "mistralai/mistral-small-3.2-24b-instruct:free",
  12      "model": "Mistral: Mistral Small 3.2 24B (free)",
  13      "cost_per_1m_in": 0,
  14      "cost_per_1m_out": 0,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 96000,
  18      "default_max_tokens": 9600,
  19      "can_reason": false,
  20      "supports_attachments": true
  21    },
  22    {
  23      "id": "mistralai/mistral-small-3.2-24b-instruct",
  24      "model": "Mistral: Mistral Small 3.2 24B",
   25      "cost_per_1m_in": 0.05,
   26      "cost_per_1m_out": 0.1,
  27      "cost_per_1m_in_cached": 0,
  28      "cost_per_1m_out_cached": 0,
  29      "context_window": 128000,
  30      "default_max_tokens": 12800,
  31      "can_reason": false,
  32      "supports_attachments": true
  33    },
  34    {
  35      "id": "minimax/minimax-m1:extended",
  36      "model": "MiniMax: MiniMax M1 (extended)",
  37      "cost_per_1m_in": 0,
  38      "cost_per_1m_out": 0,
  39      "cost_per_1m_in_cached": 0,
  40      "cost_per_1m_out_cached": 0,
  41      "context_window": 256000,
  42      "default_max_tokens": 25600,
  43      "can_reason": true,
  44      "supports_attachments": false
  45    },
  46    {
  47      "id": "minimax/minimax-m1",
  48      "model": "MiniMax: MiniMax M1",
  49      "cost_per_1m_in": 0.3,
   50      "cost_per_1m_out": 1.65,
  51      "cost_per_1m_in_cached": 0,
  52      "cost_per_1m_out_cached": 0,
  53      "context_window": 1000000,
  54      "default_max_tokens": 20000,
  55      "can_reason": true,
  56      "supports_attachments": false
  57    },
  58    {
  59      "id": "google/gemini-2.5-flash-lite-preview-06-17",
  60      "model": "Google: Gemini 2.5 Flash Lite Preview 06-17",
   61      "cost_per_1m_in": 0.1,
   62      "cost_per_1m_out": 0.4,
  63      "cost_per_1m_in_cached": 0,
  64      "cost_per_1m_out_cached": 0,
  65      "context_window": 1048576,
  66      "default_max_tokens": 32767,
  67      "can_reason": true,
  68      "supports_attachments": true
  69    },
  70    {
  71      "id": "google/gemini-2.5-flash",
  72      "model": "Google: Gemini 2.5 Flash",
  73      "cost_per_1m_in": 0.3,
  74      "cost_per_1m_out": 2.5,
  75      "cost_per_1m_in_cached": 0.3833,
  76      "cost_per_1m_out_cached": 0.075,
  77      "context_window": 1048576,
  78      "default_max_tokens": 32767,
  79      "can_reason": true,
  80      "supports_attachments": true
  81    },
  82    {
  83      "id": "google/gemini-2.5-pro",
  84      "model": "Google: Gemini 2.5 Pro",
  85      "cost_per_1m_in": 1.25,
  86      "cost_per_1m_out": 10,
  87      "cost_per_1m_in_cached": 1.625,
  88      "cost_per_1m_out_cached": 0.31,
  89      "context_window": 1048576,
  90      "default_max_tokens": 32768,
  91      "can_reason": true,
  92      "supports_attachments": true
  93    },
  94    {
  95      "id": "openai/o3-pro",
  96      "model": "OpenAI: o3 Pro",
  97      "cost_per_1m_in": 20,
  98      "cost_per_1m_out": 80,
  99      "cost_per_1m_in_cached": 0,
 100      "cost_per_1m_out_cached": 0,
 101      "context_window": 200000,
 102      "default_max_tokens": 50000,
 103      "can_reason": false,
 104      "supports_attachments": true
 105    },
 106    {
 107      "id": "x-ai/grok-3-mini",
 108      "model": "xAI: Grok 3 Mini",
 109      "cost_per_1m_in": 0.3,
 110      "cost_per_1m_out": 0.5,
 111      "cost_per_1m_in_cached": 0,
 112      "cost_per_1m_out_cached": 0.075,
 113      "context_window": 131072,
 114      "default_max_tokens": 13107,
 115      "can_reason": true,
 116      "supports_attachments": false
 117    },
 118    {
 119      "id": "x-ai/grok-3",
 120      "model": "xAI: Grok 3",
 121      "cost_per_1m_in": 3,
 122      "cost_per_1m_out": 15,
 123      "cost_per_1m_in_cached": 0,
 124      "cost_per_1m_out_cached": 0.75,
 125      "context_window": 131072,
 126      "default_max_tokens": 13107,
 127      "can_reason": false,
 128      "supports_attachments": false
 129    },
 130    {
 131      "id": "mistralai/magistral-small-2506",
 132      "model": "Mistral: Magistral Small 2506",
 133      "cost_per_1m_in": 0.5,
 134      "cost_per_1m_out": 1.5,
 135      "cost_per_1m_in_cached": 0,
 136      "cost_per_1m_out_cached": 0,
 137      "context_window": 40000,
 138      "default_max_tokens": 20000,
 139      "can_reason": true,
 140      "supports_attachments": false
 141    },
 142    {
 143      "id": "mistralai/magistral-medium-2506",
 144      "model": "Mistral: Magistral Medium 2506",
 145      "cost_per_1m_in": 2,
 146      "cost_per_1m_out": 5,
 147      "cost_per_1m_in_cached": 0,
 148      "cost_per_1m_out_cached": 0,
 149      "context_window": 40960,
 150      "default_max_tokens": 20000,
 151      "can_reason": true,
 152      "supports_attachments": false
 153    },
 154    {
 155      "id": "mistralai/magistral-medium-2506:thinking",
 156      "model": "Mistral: Magistral Medium 2506 (thinking)",
 157      "cost_per_1m_in": 2,
 158      "cost_per_1m_out": 5,
 159      "cost_per_1m_in_cached": 0,
 160      "cost_per_1m_out_cached": 0,
 161      "context_window": 40960,
 162      "default_max_tokens": 20000,
 163      "can_reason": true,
 164      "supports_attachments": false
 165    },
 166    {
 167      "id": "google/gemini-2.5-pro-preview",
 168      "model": "Google: Gemini 2.5 Pro Preview 06-05",
 169      "cost_per_1m_in": 1.25,
 170      "cost_per_1m_out": 10,
 171      "cost_per_1m_in_cached": 1.625,
 172      "cost_per_1m_out_cached": 0.31,
 173      "context_window": 1048576,
 174      "default_max_tokens": 32768,
 175      "can_reason": true,
 176      "supports_attachments": true
 177    },
 178    {
 179      "id": "deepseek/deepseek-r1-0528",
 180      "model": "DeepSeek: R1 0528",
 181      "cost_per_1m_in": 0.5,
  182      "cost_per_1m_out": 2.15,
 183      "cost_per_1m_in_cached": 0,
 184      "cost_per_1m_out_cached": 0,
 185      "context_window": 128000,
 186      "default_max_tokens": 16384,
 187      "can_reason": true,
 188      "supports_attachments": false
 189    },
 190    {
 191      "id": "anthropic/claude-opus-4",
 192      "model": "Anthropic: Claude Opus 4",
 193      "cost_per_1m_in": 15,
 194      "cost_per_1m_out": 75,
 195      "cost_per_1m_in_cached": 18.75,
 196      "cost_per_1m_out_cached": 1.5,
 197      "context_window": 200000,
 198      "default_max_tokens": 16000,
 199      "can_reason": true,
 200      "supports_attachments": true
 201    },
 202    {
 203      "id": "anthropic/claude-sonnet-4",
 204      "model": "Anthropic: Claude Sonnet 4",
 205      "cost_per_1m_in": 3,
 206      "cost_per_1m_out": 15,
 207      "cost_per_1m_in_cached": 3.75,
 208      "cost_per_1m_out_cached": 0.3,
 209      "context_window": 200000,
 210      "default_max_tokens": 32000,
 211      "can_reason": true,
 212      "supports_attachments": true
 213    },
 214    {
 215      "id": "mistralai/devstral-small:free",
 216      "model": "Mistral: Devstral Small (free)",
 217      "cost_per_1m_in": 0,
 218      "cost_per_1m_out": 0,
 219      "cost_per_1m_in_cached": 0,
 220      "cost_per_1m_out_cached": 0,
 221      "context_window": 131072,
 222      "default_max_tokens": 13107,
 223      "can_reason": false,
 224      "supports_attachments": false
 225    },
 226    {
 227      "id": "mistralai/devstral-small",
 228      "model": "Mistral: Devstral Small",
 229      "cost_per_1m_in": 0.06,
 230      "cost_per_1m_out": 0.12,
 231      "cost_per_1m_in_cached": 0,
 232      "cost_per_1m_out_cached": 0,
 233      "context_window": 128000,
 234      "default_max_tokens": 12800,
 235      "can_reason": false,
 236      "supports_attachments": false
 237    },
 238    {
 239      "id": "google/gemini-2.5-flash-preview-05-20",
 240      "model": "Google: Gemini 2.5 Flash Preview 05-20",
 241      "cost_per_1m_in": 0.15,
 242      "cost_per_1m_out": 0.6,
 243      "cost_per_1m_in_cached": 0.2333,
 244      "cost_per_1m_out_cached": 0.0375,
 245      "context_window": 1048576,
 246      "default_max_tokens": 32767,
 247      "can_reason": true,
 248      "supports_attachments": true
 249    },
 250    {
 251      "id": "google/gemini-2.5-flash-preview-05-20:thinking",
 252      "model": "Google: Gemini 2.5 Flash Preview 05-20 (thinking)",
 253      "cost_per_1m_in": 0.15,
 254      "cost_per_1m_out": 3.5,
 255      "cost_per_1m_in_cached": 0.2333,
 256      "cost_per_1m_out_cached": 0.0375,
 257      "context_window": 1048576,
 258      "default_max_tokens": 32767,
 259      "can_reason": true,
 260      "supports_attachments": true
 261    },
 262    {
 263      "id": "openai/codex-mini",
 264      "model": "OpenAI: Codex Mini",
 265      "cost_per_1m_in": 1.5,
 266      "cost_per_1m_out": 6,
 267      "cost_per_1m_in_cached": 0,
 268      "cost_per_1m_out_cached": 0.375,
 269      "context_window": 200000,
 270      "default_max_tokens": 50000,
 271      "can_reason": false,
 272      "supports_attachments": true
 273    },
 274    {
 275      "id": "mistralai/mistral-medium-3",
 276      "model": "Mistral: Mistral Medium 3",
  277      "cost_per_1m_in": 0.4,
 278      "cost_per_1m_out": 2,
 279      "cost_per_1m_in_cached": 0,
 280      "cost_per_1m_out_cached": 0,
 281      "context_window": 32768,
 282      "default_max_tokens": 3276,
 283      "can_reason": false,
 284      "supports_attachments": true
 285    },
 286    {
 287      "id": "google/gemini-2.5-pro-preview-05-06",
 288      "model": "Google: Gemini 2.5 Pro Preview 05-06",
 289      "cost_per_1m_in": 1.25,
 290      "cost_per_1m_out": 10,
 291      "cost_per_1m_in_cached": 1.625,
 292      "cost_per_1m_out_cached": 0.31,
 293      "context_window": 1048576,
 294      "default_max_tokens": 32767,
 295      "can_reason": false,
 296      "supports_attachments": true
 297    },
 298    {
 299      "id": "arcee-ai/caller-large",
 300      "model": "Arcee AI: Caller Large",
 301      "cost_per_1m_in": 0.55,
 302      "cost_per_1m_out": 0.85,
 303      "cost_per_1m_in_cached": 0,
 304      "cost_per_1m_out_cached": 0,
 305      "context_window": 32768,
 306      "default_max_tokens": 3276,
 307      "can_reason": false,
 308      "supports_attachments": false
 309    },
 310    {
 311      "id": "arcee-ai/virtuoso-large",
 312      "model": "Arcee AI: Virtuoso Large",
 313      "cost_per_1m_in": 0.75,
 314      "cost_per_1m_out": 1.2,
 315      "cost_per_1m_in_cached": 0,
 316      "cost_per_1m_out_cached": 0,
 317      "context_window": 131072,
 318      "default_max_tokens": 32000,
 319      "can_reason": false,
 320      "supports_attachments": false
 321    },
 322    {
 323      "id": "arcee-ai/virtuoso-medium-v2",
 324      "model": "Arcee AI: Virtuoso Medium V2",
 325      "cost_per_1m_in": 0.5,
  326      "cost_per_1m_out": 0.8,
 327      "cost_per_1m_in_cached": 0,
 328      "cost_per_1m_out_cached": 0,
 329      "context_window": 131072,
 330      "default_max_tokens": 16384,
 331      "can_reason": false,
 332      "supports_attachments": false
 333    },
 334    {
 335      "id": "qwen/qwen3-30b-a3b",
 336      "model": "Qwen: Qwen3 30B A3B",
 337      "cost_per_1m_in": 0.08,
 338      "cost_per_1m_out": 0.29,
 339      "cost_per_1m_in_cached": 0,
 340      "cost_per_1m_out_cached": 0,
 341      "context_window": 40960,
 342      "default_max_tokens": 20480,
 343      "can_reason": true,
 344      "supports_attachments": false
 345    },
 346    {
 347      "id": "qwen/qwen3-14b",
 348      "model": "Qwen: Qwen3 14B",
 349      "cost_per_1m_in": 0.06,
 350      "cost_per_1m_out": 0.24,
 351      "cost_per_1m_in_cached": 0,
 352      "cost_per_1m_out_cached": 0,
 353      "context_window": 40960,
 354      "default_max_tokens": 20480,
 355      "can_reason": true,
 356      "supports_attachments": false
 357    },
 358    {
 359      "id": "qwen/qwen3-32b",
 360      "model": "Qwen: Qwen3 32B",
  361      "cost_per_1m_in": 0.1,
 362      "cost_per_1m_out": 0.3,
 363      "cost_per_1m_in_cached": 0,
 364      "cost_per_1m_out_cached": 0,
 365      "context_window": 40960,
 366      "default_max_tokens": 4096,
 367      "can_reason": true,
 368      "supports_attachments": false
 369    },
 370    {
 371      "id": "qwen/qwen3-235b-a22b",
 372      "model": "Qwen: Qwen3 235B A22B",
 373      "cost_per_1m_in": 0.13,
 374      "cost_per_1m_out": 0.6,
 375      "cost_per_1m_in_cached": 0,
 376      "cost_per_1m_out_cached": 0,
 377      "context_window": 40960,
 378      "default_max_tokens": 20480,
 379      "can_reason": true,
 380      "supports_attachments": false
 381    },
 382    {
 383      "id": "google/gemini-2.5-flash-preview",
 384      "model": "Google: Gemini 2.5 Flash Preview 04-17",
 385      "cost_per_1m_in": 0.15,
 386      "cost_per_1m_out": 0.6,
 387      "cost_per_1m_in_cached": 0.2333,
 388      "cost_per_1m_out_cached": 0.0375,
 389      "context_window": 1048576,
 390      "default_max_tokens": 32767,
 391      "can_reason": false,
 392      "supports_attachments": true
 393    },
 394    {
 395      "id": "google/gemini-2.5-flash-preview:thinking",
 396      "model": "Google: Gemini 2.5 Flash Preview 04-17 (thinking)",
 397      "cost_per_1m_in": 0.15,
 398      "cost_per_1m_out": 3.5,
 399      "cost_per_1m_in_cached": 0.2333,
 400      "cost_per_1m_out_cached": 0.0375,
 401      "context_window": 1048576,
 402      "default_max_tokens": 32767,
 403      "can_reason": false,
 404      "supports_attachments": true
 405    },
 406    {
 407      "id": "openai/o4-mini-high",
 408      "model": "OpenAI: o4 Mini High",
 409      "cost_per_1m_in": 1.1,
 410      "cost_per_1m_out": 4.4,
 411      "cost_per_1m_in_cached": 0,
 412      "cost_per_1m_out_cached": 0.275,
 413      "context_window": 200000,
 414      "default_max_tokens": 50000,
 415      "can_reason": false,
 416      "supports_attachments": true
 417    },
 418    {
 419      "id": "openai/o3",
 420      "model": "OpenAI: o3",
 421      "cost_per_1m_in": 2,
 422      "cost_per_1m_out": 8,
 423      "cost_per_1m_in_cached": 0,
 424      "cost_per_1m_out_cached": 0.5,
 425      "context_window": 200000,
 426      "default_max_tokens": 50000,
 427      "can_reason": false,
 428      "supports_attachments": true
 429    },
 430    {
 431      "id": "openai/o4-mini",
 432      "model": "OpenAI: o4 Mini",
 433      "cost_per_1m_in": 1.1,
 434      "cost_per_1m_out": 4.4,
 435      "cost_per_1m_in_cached": 0,
 436      "cost_per_1m_out_cached": 0.275,
 437      "context_window": 200000,
 438      "default_max_tokens": 50000,
 439      "can_reason": false,
 440      "supports_attachments": true
 441    },
 442    {
 443      "id": "openai/gpt-4.1",
 444      "model": "OpenAI: GPT-4.1",
 445      "cost_per_1m_in": 2,
 446      "cost_per_1m_out": 8,
 447      "cost_per_1m_in_cached": 0,
 448      "cost_per_1m_out_cached": 0.5,
 449      "context_window": 1047576,
 450      "default_max_tokens": 16384,
 451      "can_reason": false,
 452      "supports_attachments": true
 453    },
 454    {
 455      "id": "openai/gpt-4.1-mini",
 456      "model": "OpenAI: GPT-4.1 Mini",
  457      "cost_per_1m_in": 0.4,
  458      "cost_per_1m_out": 1.6,
  459      "cost_per_1m_in_cached": 0,
  460      "cost_per_1m_out_cached": 0.1,
 461      "context_window": 1047576,
 462      "default_max_tokens": 16384,
 463      "can_reason": false,
 464      "supports_attachments": true
 465    },
 466    {
 467      "id": "openai/gpt-4.1-nano",
 468      "model": "OpenAI: GPT-4.1 Nano",
  469      "cost_per_1m_in": 0.1,
  470      "cost_per_1m_out": 0.4,
  471      "cost_per_1m_in_cached": 0,
  472      "cost_per_1m_out_cached": 0.025,
 473      "context_window": 1047576,
 474      "default_max_tokens": 16384,
 475      "can_reason": false,
 476      "supports_attachments": true
 477    },
 478    {
 479      "id": "x-ai/grok-3-mini-beta",
 480      "model": "xAI: Grok 3 Mini Beta",
 481      "cost_per_1m_in": 0.3,
 482      "cost_per_1m_out": 0.5,
 483      "cost_per_1m_in_cached": 0,
 484      "cost_per_1m_out_cached": 0.075,
 485      "context_window": 131072,
 486      "default_max_tokens": 13107,
 487      "can_reason": true,
 488      "supports_attachments": false
 489    },
 490    {
 491      "id": "x-ai/grok-3-beta",
 492      "model": "xAI: Grok 3 Beta",
 493      "cost_per_1m_in": 3,
 494      "cost_per_1m_out": 15,
 495      "cost_per_1m_in_cached": 0,
 496      "cost_per_1m_out_cached": 0.75,
 497      "context_window": 131072,
 498      "default_max_tokens": 13107,
 499      "can_reason": false,
 500      "supports_attachments": false
 501    },
 502    {
 503      "id": "meta-llama/llama-4-maverick",
 504      "model": "Meta: Llama 4 Maverick",
 505      "cost_per_1m_in": 0.15,
 506      "cost_per_1m_out": 0.6,
 507      "cost_per_1m_in_cached": 0,
 508      "cost_per_1m_out_cached": 0,
 509      "context_window": 1048576,
 510      "default_max_tokens": 8192,
 511      "can_reason": false,
 512      "supports_attachments": true
 513    },
 514    {
 515      "id": "meta-llama/llama-4-scout",
 516      "model": "Meta: Llama 4 Scout",
 517      "cost_per_1m_in": 0.08,
 518      "cost_per_1m_out": 0.3,
 519      "cost_per_1m_in_cached": 0,
 520      "cost_per_1m_out_cached": 0,
 521      "context_window": 1048576,
 522      "default_max_tokens": 524288,
 523      "can_reason": false,
 524      "supports_attachments": true
 525    },
 526    {
 527      "id": "all-hands/openhands-lm-32b-v0.1",
 528      "model": "OpenHands LM 32B V0.1",
 529      "cost_per_1m_in": 2.6,
 530      "cost_per_1m_out": 3.4,
 531      "cost_per_1m_in_cached": 0,
 532      "cost_per_1m_out_cached": 0,
 533      "context_window": 16384,
 534      "default_max_tokens": 2048,
 535      "can_reason": false,
 536      "supports_attachments": false
 537    },
 538    {
 539      "id": "google/gemini-2.5-pro-exp-03-25",
 540      "model": "Google: Gemini 2.5 Pro Experimental",
 541      "cost_per_1m_in": 0,
 542      "cost_per_1m_out": 0,
 543      "cost_per_1m_in_cached": 0,
 544      "cost_per_1m_out_cached": 0,
 545      "context_window": 1048576,
 546      "default_max_tokens": 32767,
 547      "can_reason": false,
 548      "supports_attachments": true
 549    },
 550    {
 551      "id": "deepseek/deepseek-chat-v3-0324:free",
 552      "model": "DeepSeek: DeepSeek V3 0324 (free)",
 553      "cost_per_1m_in": 0,
 554      "cost_per_1m_out": 0,
 555      "cost_per_1m_in_cached": 0,
 556      "cost_per_1m_out_cached": 0,
 557      "context_window": 163840,
 558      "default_max_tokens": 16384,
 559      "can_reason": false,
 560      "supports_attachments": false
 561    },
 562    {
 563      "id": "deepseek/deepseek-chat-v3-0324",
 564      "model": "DeepSeek: DeepSeek V3 0324",
 565      "cost_per_1m_in": 0.28,
 566      "cost_per_1m_out": 0.88,
 567      "cost_per_1m_in_cached": 0,
 568      "cost_per_1m_out_cached": 0,
 569      "context_window": 163840,
 570      "default_max_tokens": 16384,
 571      "can_reason": false,
 572      "supports_attachments": false
 573    },
 574    {
 575      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
 576      "model": "Mistral: Mistral Small 3.1 24B (free)",
 577      "cost_per_1m_in": 0,
 578      "cost_per_1m_out": 0,
 579      "cost_per_1m_in_cached": 0,
 580      "cost_per_1m_out_cached": 0,
 581      "context_window": 96000,
 582      "default_max_tokens": 48000,
 583      "can_reason": false,
 584      "supports_attachments": true
 585    },
 586    {
 587      "id": "mistralai/mistral-small-3.1-24b-instruct",
 588      "model": "Mistral: Mistral Small 3.1 24B",
  589      "cost_per_1m_in": 0.05,
  590      "cost_per_1m_out": 0.1,
 591      "cost_per_1m_in_cached": 0,
 592      "cost_per_1m_out_cached": 0,
 593      "context_window": 128000,
 594      "default_max_tokens": 12800,
 595      "can_reason": false,
 596      "supports_attachments": true
 597    },
 598    {
 599      "id": "ai21/jamba-1.6-large",
 600      "model": "AI21: Jamba 1.6 Large",
 601      "cost_per_1m_in": 2,
 602      "cost_per_1m_out": 8,
 603      "cost_per_1m_in_cached": 0,
 604      "cost_per_1m_out_cached": 0,
 605      "context_window": 256000,
 606      "default_max_tokens": 2048,
 607      "can_reason": false,
 608      "supports_attachments": false
 609    },
 610    {
 611      "id": "ai21/jamba-1.6-mini",
 612      "model": "AI21: Jamba Mini 1.6",
  613      "cost_per_1m_in": 0.2,
  614      "cost_per_1m_out": 0.4,
 615      "cost_per_1m_in_cached": 0,
 616      "cost_per_1m_out_cached": 0,
 617      "context_window": 256000,
 618      "default_max_tokens": 2048,
 619      "can_reason": false,
 620      "supports_attachments": false
 621    },
 622    {
 623      "id": "openai/gpt-4.5-preview",
 624      "model": "OpenAI: GPT-4.5 (Preview)",
 625      "cost_per_1m_in": 75,
 626      "cost_per_1m_out": 150,
 627      "cost_per_1m_in_cached": 0,
 628      "cost_per_1m_out_cached": 37.5,
 629      "context_window": 128000,
 630      "default_max_tokens": 8192,
 631      "can_reason": false,
 632      "supports_attachments": true
 633    },
 634    {
 635      "id": "google/gemini-2.0-flash-lite-001",
 636      "model": "Google: Gemini 2.0 Flash Lite",
 637      "cost_per_1m_in": 0.075,
 638      "cost_per_1m_out": 0.3,
 639      "cost_per_1m_in_cached": 0,
 640      "cost_per_1m_out_cached": 0,
 641      "context_window": 1048576,
 642      "default_max_tokens": 4096,
 643      "can_reason": false,
 644      "supports_attachments": true
 645    },
 646    {
 647      "id": "anthropic/claude-3.7-sonnet",
 648      "model": "Anthropic: Claude 3.7 Sonnet",
 649      "cost_per_1m_in": 3,
 650      "cost_per_1m_out": 15,
 651      "cost_per_1m_in_cached": 3.75,
 652      "cost_per_1m_out_cached": 0.3,
 653      "context_window": 200000,
 654      "default_max_tokens": 32000,
 655      "can_reason": true,
 656      "supports_attachments": true
 657    },
 658    {
 659      "id": "anthropic/claude-3.7-sonnet:beta",
 660      "model": "Anthropic: Claude 3.7 Sonnet (self-moderated)",
 661      "cost_per_1m_in": 3,
 662      "cost_per_1m_out": 15,
 663      "cost_per_1m_in_cached": 3.75,
 664      "cost_per_1m_out_cached": 0.3,
 665      "context_window": 200000,
 666      "default_max_tokens": 64000,
 667      "can_reason": true,
 668      "supports_attachments": true
 669    },
 670    {
 671      "id": "anthropic/claude-3.7-sonnet:thinking",
 672      "model": "Anthropic: Claude 3.7 Sonnet (thinking)",
 673      "cost_per_1m_in": 3,
 674      "cost_per_1m_out": 15,
 675      "cost_per_1m_in_cached": 3.75,
 676      "cost_per_1m_out_cached": 0.3,
 677      "context_window": 200000,
 678      "default_max_tokens": 64000,
 679      "can_reason": true,
 680      "supports_attachments": true
 681    },
 682    {
 683      "id": "mistralai/mistral-saba",
 684      "model": "Mistral: Saba",
  685      "cost_per_1m_in": 0.2,
 686      "cost_per_1m_out": 0.6,
 687      "cost_per_1m_in_cached": 0,
 688      "cost_per_1m_out_cached": 0,
 689      "context_window": 32768,
 690      "default_max_tokens": 3276,
 691      "can_reason": false,
 692      "supports_attachments": false
 693    },
 694    {
 695      "id": "openai/o3-mini-high",
 696      "model": "OpenAI: o3 Mini High",
 697      "cost_per_1m_in": 1.1,
 698      "cost_per_1m_out": 4.4,
 699      "cost_per_1m_in_cached": 0,
 700      "cost_per_1m_out_cached": 0.55,
 701      "context_window": 200000,
 702      "default_max_tokens": 50000,
 703      "can_reason": false,
 704      "supports_attachments": false
 705    },
 706    {
 707      "id": "google/gemini-2.0-flash-001",
 708      "model": "Google: Gemini 2.0 Flash",
  709      "cost_per_1m_in": 0.1,
  710      "cost_per_1m_out": 0.4,
  711      "cost_per_1m_in_cached": 0.1833,
  712      "cost_per_1m_out_cached": 0.025,
 713      "context_window": 1048576,
 714      "default_max_tokens": 4096,
 715      "can_reason": false,
 716      "supports_attachments": true
 717    },
 718    {
 719      "id": "qwen/qwen-turbo",
 720      "model": "Qwen: Qwen-Turbo",
  721      "cost_per_1m_in": 0.05,
  722      "cost_per_1m_out": 0.2,
 723      "cost_per_1m_in_cached": 0,
 724      "cost_per_1m_out_cached": 0.02,
 725      "context_window": 1000000,
 726      "default_max_tokens": 4096,
 727      "can_reason": false,
 728      "supports_attachments": false
 729    },
 730    {
 731      "id": "qwen/qwen-plus",
 732      "model": "Qwen: Qwen-Plus",
  733      "cost_per_1m_in": 0.4,
 734      "cost_per_1m_out": 1.2,
 735      "cost_per_1m_in_cached": 0,
 736      "cost_per_1m_out_cached": 0.16,
 737      "context_window": 131072,
 738      "default_max_tokens": 4096,
 739      "can_reason": false,
 740      "supports_attachments": false
 741    },
 742    {
 743      "id": "qwen/qwen-max",
  744      "model": "Qwen: Qwen-Max",
  745      "cost_per_1m_in": 1.6,
  746      "cost_per_1m_out": 6.4,
 747      "cost_per_1m_in_cached": 0,
 748      "cost_per_1m_out_cached": 0.64,
 749      "context_window": 32768,
 750      "default_max_tokens": 4096,
 751      "can_reason": false,
 752      "supports_attachments": false
 753    },
 754    {
 755      "id": "openai/o3-mini",
 756      "model": "OpenAI: o3 Mini",
 757      "cost_per_1m_in": 1.1,
 758      "cost_per_1m_out": 4.4,
 759      "cost_per_1m_in_cached": 0,
 760      "cost_per_1m_out_cached": 0.55,
 761      "context_window": 200000,
 762      "default_max_tokens": 50000,
 763      "can_reason": false,
 764      "supports_attachments": false
 765    },
 766    {
 767      "id": "mistralai/mistral-small-24b-instruct-2501",
 768      "model": "Mistral: Mistral Small 3",
  769      "cost_per_1m_in": 0.05,
 770      "cost_per_1m_out": 0.09,
 771      "cost_per_1m_in_cached": 0,
 772      "cost_per_1m_out_cached": 0,
 773      "context_window": 32768,
 774      "default_max_tokens": 16384,
 775      "can_reason": false,
 776      "supports_attachments": false
 777    },
 778    {
 779      "id": "deepseek/deepseek-r1-distill-llama-70b",
 780      "model": "DeepSeek: R1 Distill Llama 70B",
  781      "cost_per_1m_in": 0.1,
  782      "cost_per_1m_out": 0.4,
 783      "cost_per_1m_in_cached": 0,
 784      "cost_per_1m_out_cached": 0,
 785      "context_window": 131072,
 786      "default_max_tokens": 8192,
 787      "can_reason": true,
 788      "supports_attachments": false
 789    },
 790    {
 791      "id": "deepseek/deepseek-r1",
 792      "model": "DeepSeek: R1",
  793      "cost_per_1m_in": 0.45,
  794      "cost_per_1m_out": 2.15,
 795      "cost_per_1m_in_cached": 0,
 796      "cost_per_1m_out_cached": 0,
 797      "context_window": 128000,
 798      "default_max_tokens": 16384,
 799      "can_reason": true,
 800      "supports_attachments": false
 801    },
 802    {
 803      "id": "mistralai/codestral-2501",
 804      "model": "Mistral: Codestral 2501",
 805      "cost_per_1m_in": 0.3,
  806      "cost_per_1m_out": 0.9,
 807      "cost_per_1m_in_cached": 0,
 808      "cost_per_1m_out_cached": 0,
 809      "context_window": 262144,
 810      "default_max_tokens": 26214,
 811      "can_reason": false,
 812      "supports_attachments": false
 813    },
 814    {
 815      "id": "deepseek/deepseek-chat",
 816      "model": "DeepSeek: DeepSeek V3",
 817      "cost_per_1m_in": 0.38,
  818      "cost_per_1m_out": 0.89,
 819      "cost_per_1m_in_cached": 0,
 820      "cost_per_1m_out_cached": 0,
 821      "context_window": 163840,
 822      "default_max_tokens": 81920,
 823      "can_reason": false,
 824      "supports_attachments": false
 825    },
 826    {
 827      "id": "openai/o1",
 828      "model": "OpenAI: o1",
 829      "cost_per_1m_in": 15,
 830      "cost_per_1m_out": 60,
 831      "cost_per_1m_in_cached": 0,
 832      "cost_per_1m_out_cached": 7.5,
 833      "context_window": 200000,
 834      "default_max_tokens": 50000,
 835      "can_reason": false,
 836      "supports_attachments": true
 837    },
 838    {
 839      "id": "x-ai/grok-2-1212",
 840      "model": "xAI: Grok 2 1212",
 841      "cost_per_1m_in": 2,
 842      "cost_per_1m_out": 10,
 843      "cost_per_1m_in_cached": 0,
 844      "cost_per_1m_out_cached": 0,
 845      "context_window": 131072,
 846      "default_max_tokens": 13107,
 847      "can_reason": false,
 848      "supports_attachments": false
 849    },
 850    {
 851      "id": "meta-llama/llama-3.3-70b-instruct",
 852      "model": "Meta: Llama 3.3 70B Instruct",
  853      "cost_per_1m_in": 0.05,
  854      "cost_per_1m_out": 0.17,
 855      "cost_per_1m_in_cached": 0,
 856      "cost_per_1m_out_cached": 0,
 857      "context_window": 131000,
 858      "default_max_tokens": 65500,
 859      "can_reason": false,
 860      "supports_attachments": false
 861    },
 862    {
 863      "id": "amazon/nova-lite-v1",
 864      "model": "Amazon: Nova Lite 1.0",
 865      "cost_per_1m_in": 0.06,
 866      "cost_per_1m_out": 0.24,
 867      "cost_per_1m_in_cached": 0,
 868      "cost_per_1m_out_cached": 0,
 869      "context_window": 300000,
 870      "default_max_tokens": 2560,
 871      "can_reason": false,
 872      "supports_attachments": true
 873    },
 874    {
 875      "id": "amazon/nova-micro-v1",
 876      "model": "Amazon: Nova Micro 1.0",
 877      "cost_per_1m_in": 0.035,
 878      "cost_per_1m_out": 0.14,
 879      "cost_per_1m_in_cached": 0,
 880      "cost_per_1m_out_cached": 0,
 881      "context_window": 128000,
 882      "default_max_tokens": 2560,
 883      "can_reason": false,
 884      "supports_attachments": false
 885    },
 886    {
 887      "id": "amazon/nova-pro-v1",
 888      "model": "Amazon: Nova Pro 1.0",
  889      "cost_per_1m_in": 0.8,
  890      "cost_per_1m_out": 3.2,
 891      "cost_per_1m_in_cached": 0,
 892      "cost_per_1m_out_cached": 0,
 893      "context_window": 300000,
 894      "default_max_tokens": 2560,
 895      "can_reason": false,
 896      "supports_attachments": true
 897    },
 898    {
 899      "id": "openai/gpt-4o-2024-11-20",
 900      "model": "OpenAI: GPT-4o (2024-11-20)",
 901      "cost_per_1m_in": 2.5,
 902      "cost_per_1m_out": 10,
 903      "cost_per_1m_in_cached": 0,
 904      "cost_per_1m_out_cached": 1.25,
 905      "context_window": 128000,
 906      "default_max_tokens": 8192,
 907      "can_reason": false,
 908      "supports_attachments": true
 909    },
 910    {
 911      "id": "mistralai/mistral-large-2411",
 912      "model": "Mistral Large 2411",
 913      "cost_per_1m_in": 2,
 914      "cost_per_1m_out": 6,
 915      "cost_per_1m_in_cached": 0,
 916      "cost_per_1m_out_cached": 0,
 917      "context_window": 131072,
 918      "default_max_tokens": 13107,
 919      "can_reason": false,
 920      "supports_attachments": false
 921    },
 922    {
 923      "id": "mistralai/mistral-large-2407",
 924      "model": "Mistral Large 2407",
 925      "cost_per_1m_in": 2,
 926      "cost_per_1m_out": 6,
 927      "cost_per_1m_in_cached": 0,
 928      "cost_per_1m_out_cached": 0,
 929      "context_window": 131072,
 930      "default_max_tokens": 13107,
 931      "can_reason": false,
 932      "supports_attachments": false
 933    },
 934    {
 935      "id": "mistralai/pixtral-large-2411",
 936      "model": "Mistral: Pixtral Large 2411",
 937      "cost_per_1m_in": 2,
 938      "cost_per_1m_out": 6,
 939      "cost_per_1m_in_cached": 0,
 940      "cost_per_1m_out_cached": 0,
 941      "context_window": 32768,
 942      "default_max_tokens": 3276,
 943      "can_reason": false,
 944      "supports_attachments": true
 945    },
 946    {
 947      "id": "thedrummer/unslopnemo-12b",
 948      "model": "TheDrummer: UnslopNemo 12B",
 949      "cost_per_1m_in": 0.39999999999999997,
 950      "cost_per_1m_out": 0.39999999999999997,
 951      "cost_per_1m_in_cached": 0,
 952      "cost_per_1m_out_cached": 0,
 953      "context_window": 32768,
 954      "default_max_tokens": 3276,
 955      "can_reason": false,
 956      "supports_attachments": false
 957    },
 958    {
 959      "id": "anthropic/claude-3.5-haiku:beta",
 960      "model": "Anthropic: Claude 3.5 Haiku (self-moderated)",
 961      "cost_per_1m_in": 0.7999999999999999,
 962      "cost_per_1m_out": 4,
 963      "cost_per_1m_in_cached": 1,
 964      "cost_per_1m_out_cached": 0.08,
 965      "context_window": 200000,
 966      "default_max_tokens": 4096,
 967      "can_reason": false,
 968      "supports_attachments": true
 969    },
 970    {
 971      "id": "anthropic/claude-3.5-haiku",
 972      "model": "Anthropic: Claude 3.5 Haiku",
 973      "cost_per_1m_in": 0.7999999999999999,
 974      "cost_per_1m_out": 4,
 975      "cost_per_1m_in_cached": 1,
 976      "cost_per_1m_out_cached": 0.08,
 977      "context_window": 200000,
 978      "default_max_tokens": 4096,
 979      "can_reason": false,
 980      "supports_attachments": true
 981    },
 982    {
 983      "id": "anthropic/claude-3.5-haiku-20241022:beta",
 984      "model": "Anthropic: Claude 3.5 Haiku (2024-10-22) (self-moderated)",
 985      "cost_per_1m_in": 0.7999999999999999,
 986      "cost_per_1m_out": 4,
 987      "cost_per_1m_in_cached": 1,
 988      "cost_per_1m_out_cached": 0.08,
 989      "context_window": 200000,
 990      "default_max_tokens": 4096,
 991      "can_reason": false,
 992      "supports_attachments": true
 993    },
 994    {
 995      "id": "anthropic/claude-3.5-haiku-20241022",
 996      "model": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
 997      "cost_per_1m_in": 0.7999999999999999,
 998      "cost_per_1m_out": 4,
 999      "cost_per_1m_in_cached": 1,
1000      "cost_per_1m_out_cached": 0.08,
1001      "context_window": 200000,
1002      "default_max_tokens": 4096,
1003      "can_reason": false,
1004      "supports_attachments": true
1005    },
1006    {
1007      "id": "anthropic/claude-3.5-sonnet:beta",
1008      "model": "Anthropic: Claude 3.5 Sonnet (self-moderated)",
1009      "cost_per_1m_in": 3,
1010      "cost_per_1m_out": 15,
1011      "cost_per_1m_in_cached": 3.75,
1012      "cost_per_1m_out_cached": 0.3,
1013      "context_window": 200000,
1014      "default_max_tokens": 4096,
1015      "can_reason": false,
1016      "supports_attachments": true
1017    },
1018    {
1019      "id": "anthropic/claude-3.5-sonnet",
1020      "model": "Anthropic: Claude 3.5 Sonnet",
1021      "cost_per_1m_in": 3,
1022      "cost_per_1m_out": 15,
1023      "cost_per_1m_in_cached": 3.75,
1024      "cost_per_1m_out_cached": 0.3,
1025      "context_window": 200000,
1026      "default_max_tokens": 4096,
1027      "can_reason": false,
1028      "supports_attachments": true
1029    },
1030    {
1031      "id": "x-ai/grok-beta",
1032      "model": "xAI: Grok Beta",
1033      "cost_per_1m_in": 5,
1034      "cost_per_1m_out": 15,
1035      "cost_per_1m_in_cached": 0,
1036      "cost_per_1m_out_cached": 0,
1037      "context_window": 131072,
1038      "default_max_tokens": 13107,
1039      "can_reason": false,
1040      "supports_attachments": false
1041    },
1042    {
1043      "id": "mistralai/ministral-8b",
1044      "model": "Mistral: Ministral 8B",
1045      "cost_per_1m_in": 0.09999999999999999,
1046      "cost_per_1m_out": 0.09999999999999999,
1047      "cost_per_1m_in_cached": 0,
1048      "cost_per_1m_out_cached": 0,
1049      "context_window": 128000,
1050      "default_max_tokens": 12800,
1051      "can_reason": false,
1052      "supports_attachments": false
1053    },
1054    {
1055      "id": "mistralai/ministral-3b",
1056      "model": "Mistral: Ministral 3B",
1057      "cost_per_1m_in": 0.04,
1058      "cost_per_1m_out": 0.04,
1059      "cost_per_1m_in_cached": 0,
1060      "cost_per_1m_out_cached": 0,
1061      "context_window": 131072,
1062      "default_max_tokens": 13107,
1063      "can_reason": false,
1064      "supports_attachments": false
1065    },
1066    {
1067      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1068      "model": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1069      "cost_per_1m_in": 0.12,
1070      "cost_per_1m_out": 0.3,
1071      "cost_per_1m_in_cached": 0,
1072      "cost_per_1m_out_cached": 0,
1073      "context_window": 131072,
1074      "default_max_tokens": 65536,
1075      "can_reason": false,
1076      "supports_attachments": false
1077    },
1078    {
1079      "id": "google/gemini-flash-1.5-8b",
1080      "model": "Google: Gemini 1.5 Flash 8B",
1081      "cost_per_1m_in": 0.0375,
1082      "cost_per_1m_out": 0.15,
1083      "cost_per_1m_in_cached": 0.0583,
1084      "cost_per_1m_out_cached": 0.01,
1085      "context_window": 1000000,
1086      "default_max_tokens": 4096,
1087      "can_reason": false,
1088      "supports_attachments": true
1089    },
1090    {
1091      "id": "meta-llama/llama-3.2-11b-vision-instruct",
1092      "model": "Meta: Llama 3.2 11B Vision Instruct",
1093      "cost_per_1m_in": 0.049,
1094      "cost_per_1m_out": 0.049,
1095      "cost_per_1m_in_cached": 0,
1096      "cost_per_1m_out_cached": 0,
1097      "context_window": 131072,
1098      "default_max_tokens": 8192,
1099      "can_reason": false,
1100      "supports_attachments": true
1101    },
1102    {
1103      "id": "meta-llama/llama-3.2-3b-instruct",
1104      "model": "Meta: Llama 3.2 3B Instruct",
1105      "cost_per_1m_in": 0.003,
1106      "cost_per_1m_out": 0.006,
1107      "cost_per_1m_in_cached": 0,
1108      "cost_per_1m_out_cached": 0,
1109      "context_window": 20000,
1110      "default_max_tokens": 10000,
1111      "can_reason": false,
1112      "supports_attachments": false
1113    },
1114    {
1115      "id": "qwen/qwen-2.5-72b-instruct",
1116      "model": "Qwen2.5 72B Instruct",
1117      "cost_per_1m_in": 0.12,
1118      "cost_per_1m_out": 0.39,
1119      "cost_per_1m_in_cached": 0,
1120      "cost_per_1m_out_cached": 0,
1121      "context_window": 32768,
1122      "default_max_tokens": 8192,
1123      "can_reason": false,
1124      "supports_attachments": false
1125    },
1126    {
1127      "id": "mistralai/pixtral-12b",
1128      "model": "Mistral: Pixtral 12B",
1129      "cost_per_1m_in": 0.09999999999999999,
1130      "cost_per_1m_out": 0.09999999999999999,
1131      "cost_per_1m_in_cached": 0,
1132      "cost_per_1m_out_cached": 0,
1133      "context_window": 32768,
1134      "default_max_tokens": 3276,
1135      "can_reason": false,
1136      "supports_attachments": true
1137    },
1138    {
1139      "id": "cohere/command-r-plus-08-2024",
1140      "model": "Cohere: Command R+ (08-2024)",
1141      "cost_per_1m_in": 2.5,
1142      "cost_per_1m_out": 10,
1143      "cost_per_1m_in_cached": 0,
1144      "cost_per_1m_out_cached": 0,
1145      "context_window": 128000,
1146      "default_max_tokens": 2000,
1147      "can_reason": false,
1148      "supports_attachments": false
1149    },
1150    {
1151      "id": "cohere/command-r-08-2024",
1152      "model": "Cohere: Command R (08-2024)",
1153      "cost_per_1m_in": 0.15,
1154      "cost_per_1m_out": 0.6,
1155      "cost_per_1m_in_cached": 0,
1156      "cost_per_1m_out_cached": 0,
1157      "context_window": 128000,
1158      "default_max_tokens": 2000,
1159      "can_reason": false,
1160      "supports_attachments": false
1161    },
1162    {
1163      "id": "microsoft/phi-3.5-mini-128k-instruct",
1164      "model": "Microsoft: Phi-3.5 Mini 128K Instruct",
1165      "cost_per_1m_in": 0.09999999999999999,
1166      "cost_per_1m_out": 0.09999999999999999,
1167      "cost_per_1m_in_cached": 0,
1168      "cost_per_1m_out_cached": 0,
1169      "context_window": 128000,
1170      "default_max_tokens": 12800,
1171      "can_reason": false,
1172      "supports_attachments": false
1173    },
1174    {
1175      "id": "nousresearch/hermes-3-llama-3.1-70b",
1176      "model": "Nous: Hermes 3 70B Instruct",
1177      "cost_per_1m_in": 0.12,
1178      "cost_per_1m_out": 0.3,
1179      "cost_per_1m_in_cached": 0,
1180      "cost_per_1m_out_cached": 0,
1181      "context_window": 131072,
1182      "default_max_tokens": 65536,
1183      "can_reason": false,
1184      "supports_attachments": false
1185    },
1186    {
1187      "id": "openai/gpt-4o-2024-08-06",
1188      "model": "OpenAI: GPT-4o (2024-08-06)",
1189      "cost_per_1m_in": 2.5,
1190      "cost_per_1m_out": 10,
1191      "cost_per_1m_in_cached": 0,
1192      "cost_per_1m_out_cached": 1.25,
1193      "context_window": 128000,
1194      "default_max_tokens": 8192,
1195      "can_reason": false,
1196      "supports_attachments": true
1197    },
1198    {
1199      "id": "meta-llama/llama-3.1-405b-instruct",
1200      "model": "Meta: Llama 3.1 405B Instruct",
1201      "cost_per_1m_in": 0.7999999999999999,
1202      "cost_per_1m_out": 0.7999999999999999,
1203      "cost_per_1m_in_cached": 0,
1204      "cost_per_1m_out_cached": 0,
1205      "context_window": 32768,
1206      "default_max_tokens": 8192,
1207      "can_reason": false,
1208      "supports_attachments": false
1209    },
1210    {
1211      "id": "meta-llama/llama-3.1-70b-instruct",
1212      "model": "Meta: Llama 3.1 70B Instruct",
1213      "cost_per_1m_in": 0.09999999999999999,
1214      "cost_per_1m_out": 0.28,
1215      "cost_per_1m_in_cached": 0,
1216      "cost_per_1m_out_cached": 0,
1217      "context_window": 131072,
1218      "default_max_tokens": 8192,
1219      "can_reason": false,
1220      "supports_attachments": false
1221    },
1222    {
1223      "id": "meta-llama/llama-3.1-8b-instruct",
1224      "model": "Meta: Llama 3.1 8B Instruct",
1225      "cost_per_1m_in": 0.016,
1226      "cost_per_1m_out": 0.020999999999999998,
1227      "cost_per_1m_in_cached": 0,
1228      "cost_per_1m_out_cached": 0,
1229      "context_window": 131000,
1230      "default_max_tokens": 65500,
1231      "can_reason": false,
1232      "supports_attachments": false
1233    },
1234    {
1235      "id": "mistralai/mistral-nemo",
1236      "model": "Mistral: Mistral Nemo",
1237      "cost_per_1m_in": 0.01,
1238      "cost_per_1m_out": 0.011,
1239      "cost_per_1m_in_cached": 0,
1240      "cost_per_1m_out_cached": 0,
1241      "context_window": 131072,
1242      "default_max_tokens": 65536,
1243      "can_reason": false,
1244      "supports_attachments": false
1245    },
1246    {
1247      "id": "openai/gpt-4o-mini",
1248      "model": "OpenAI: GPT-4o-mini",
1249      "cost_per_1m_in": 0.15,
1250      "cost_per_1m_out": 0.6,
1251      "cost_per_1m_in_cached": 0,
1252      "cost_per_1m_out_cached": 0.075,
1253      "context_window": 128000,
1254      "default_max_tokens": 8192,
1255      "can_reason": false,
1256      "supports_attachments": true
1257    },
1258    {
1259      "id": "openai/gpt-4o-mini-2024-07-18",
1260      "model": "OpenAI: GPT-4o-mini (2024-07-18)",
1261      "cost_per_1m_in": 0.15,
1262      "cost_per_1m_out": 0.6,
1263      "cost_per_1m_in_cached": 0,
1264      "cost_per_1m_out_cached": 0.075,
1265      "context_window": 128000,
1266      "default_max_tokens": 8192,
1267      "can_reason": false,
1268      "supports_attachments": true
1269    },
1270    {
1271      "id": "anthropic/claude-3.5-sonnet-20240620:beta",
1272      "model": "Anthropic: Claude 3.5 Sonnet (2024-06-20) (self-moderated)",
1273      "cost_per_1m_in": 3,
1274      "cost_per_1m_out": 15,
1275      "cost_per_1m_in_cached": 3.75,
1276      "cost_per_1m_out_cached": 0.3,
1277      "context_window": 200000,
1278      "default_max_tokens": 4096,
1279      "can_reason": false,
1280      "supports_attachments": true
1281    },
1282    {
1283      "id": "anthropic/claude-3.5-sonnet-20240620",
1284      "model": "Anthropic: Claude 3.5 Sonnet (2024-06-20)",
1285      "cost_per_1m_in": 3,
1286      "cost_per_1m_out": 15,
1287      "cost_per_1m_in_cached": 3.75,
1288      "cost_per_1m_out_cached": 0.3,
1289      "context_window": 200000,
1290      "default_max_tokens": 4096,
1291      "can_reason": false,
1292      "supports_attachments": true
1293    },
1294    {
1295      "id": "mistralai/mistral-7b-instruct-v0.3",
1296      "model": "Mistral: Mistral 7B Instruct v0.3",
1297      "cost_per_1m_in": 0.028,
1298      "cost_per_1m_out": 0.054,
1299      "cost_per_1m_in_cached": 0,
1300      "cost_per_1m_out_cached": 0,
1301      "context_window": 32768,
1302      "default_max_tokens": 8192,
1303      "can_reason": false,
1304      "supports_attachments": false
1305    },
1306    {
1307      "id": "mistralai/mistral-7b-instruct:free",
1308      "model": "Mistral: Mistral 7B Instruct (free)",
1309      "cost_per_1m_in": 0,
1310      "cost_per_1m_out": 0,
1311      "cost_per_1m_in_cached": 0,
1312      "cost_per_1m_out_cached": 0,
1313      "context_window": 32768,
1314      "default_max_tokens": 8192,
1315      "can_reason": false,
1316      "supports_attachments": false
1317    },
1318    {
1319      "id": "mistralai/mistral-7b-instruct",
1320      "model": "Mistral: Mistral 7B Instruct",
1321      "cost_per_1m_in": 0.028,
1322      "cost_per_1m_out": 0.054,
1323      "cost_per_1m_in_cached": 0,
1324      "cost_per_1m_out_cached": 0,
1325      "context_window": 32768,
1326      "default_max_tokens": 8192,
1327      "can_reason": false,
1328      "supports_attachments": false
1329    },
1330    {
1331      "id": "microsoft/phi-3-mini-128k-instruct",
1332      "model": "Microsoft: Phi-3 Mini 128K Instruct",
1333      "cost_per_1m_in": 0.09999999999999999,
1334      "cost_per_1m_out": 0.09999999999999999,
1335      "cost_per_1m_in_cached": 0,
1336      "cost_per_1m_out_cached": 0,
1337      "context_window": 128000,
1338      "default_max_tokens": 12800,
1339      "can_reason": false,
1340      "supports_attachments": false
1341    },
1342    {
1343      "id": "microsoft/phi-3-medium-128k-instruct",
1344      "model": "Microsoft: Phi-3 Medium 128K Instruct",
1345      "cost_per_1m_in": 1,
1346      "cost_per_1m_out": 1,
1347      "cost_per_1m_in_cached": 0,
1348      "cost_per_1m_out_cached": 0,
1349      "context_window": 128000,
1350      "default_max_tokens": 12800,
1351      "can_reason": false,
1352      "supports_attachments": false
1353    },
1354    {
1355      "id": "google/gemini-flash-1.5",
1356      "model": "Google: Gemini 1.5 Flash ",
1357      "cost_per_1m_in": 0.075,
1358      "cost_per_1m_out": 0.3,
1359      "cost_per_1m_in_cached": 0.1583,
1360      "cost_per_1m_out_cached": 0.01875,
1361      "context_window": 1000000,
1362      "default_max_tokens": 4096,
1363      "can_reason": false,
1364      "supports_attachments": true
1365    },
1366    {
1367      "id": "openai/gpt-4o-2024-05-13",
1368      "model": "OpenAI: GPT-4o (2024-05-13)",
1369      "cost_per_1m_in": 5,
1370      "cost_per_1m_out": 15,
1371      "cost_per_1m_in_cached": 0,
1372      "cost_per_1m_out_cached": 0,
1373      "context_window": 128000,
1374      "default_max_tokens": 2048,
1375      "can_reason": false,
1376      "supports_attachments": true
1377    },
1378    {
1379      "id": "openai/gpt-4o",
1380      "model": "OpenAI: GPT-4o",
1381      "cost_per_1m_in": 2.5,
1382      "cost_per_1m_out": 10,
1383      "cost_per_1m_in_cached": 0,
1384      "cost_per_1m_out_cached": 1.25,
1385      "context_window": 128000,
1386      "default_max_tokens": 8192,
1387      "can_reason": false,
1388      "supports_attachments": true
1389    },
1390    {
1391      "id": "openai/gpt-4o:extended",
1392      "model": "OpenAI: GPT-4o (extended)",
1393      "cost_per_1m_in": 6,
1394      "cost_per_1m_out": 18,
1395      "cost_per_1m_in_cached": 0,
1396      "cost_per_1m_out_cached": 0,
1397      "context_window": 128000,
1398      "default_max_tokens": 32000,
1399      "can_reason": false,
1400      "supports_attachments": true
1401    },
1402    {
1403      "id": "meta-llama/llama-3-8b-instruct",
1404      "model": "Meta: Llama 3 8B Instruct",
1405      "cost_per_1m_in": 0.03,
1406      "cost_per_1m_out": 0.06,
1407      "cost_per_1m_in_cached": 0,
1408      "cost_per_1m_out_cached": 0,
1409      "context_window": 8192,
1410      "default_max_tokens": 8192,
1411      "can_reason": false,
1412      "supports_attachments": false
1413    },
1414    {
1415      "id": "meta-llama/llama-3-70b-instruct",
1416      "model": "Meta: Llama 3 70B Instruct",
1417      "cost_per_1m_in": 0.3,
1418      "cost_per_1m_out": 0.39999999999999997,
1419      "cost_per_1m_in_cached": 0,
1420      "cost_per_1m_out_cached": 0,
1421      "context_window": 8192,
1422      "default_max_tokens": 8192,
1423      "can_reason": false,
1424      "supports_attachments": false
1425    },
1426    {
1427      "id": "mistralai/mixtral-8x22b-instruct",
1428      "model": "Mistral: Mixtral 8x22B Instruct",
1429      "cost_per_1m_in": 0.8999999999999999,
1430      "cost_per_1m_out": 0.8999999999999999,
1431      "cost_per_1m_in_cached": 0,
1432      "cost_per_1m_out_cached": 0,
1433      "context_window": 65536,
1434      "default_max_tokens": 6553,
1435      "can_reason": false,
1436      "supports_attachments": false
1437    },
1438    {
1439      "id": "openai/gpt-4-turbo",
1440      "model": "OpenAI: GPT-4 Turbo",
1441      "cost_per_1m_in": 10,
1442      "cost_per_1m_out": 30,
1443      "cost_per_1m_in_cached": 0,
1444      "cost_per_1m_out_cached": 0,
1445      "context_window": 128000,
1446      "default_max_tokens": 2048,
1447      "can_reason": false,
1448      "supports_attachments": true
1449    },
1450    {
1451      "id": "google/gemini-pro-1.5",
1452      "model": "Google: Gemini 1.5 Pro",
1453      "cost_per_1m_in": 1.25,
1454      "cost_per_1m_out": 5,
1455      "cost_per_1m_in_cached": 0,
1456      "cost_per_1m_out_cached": 0,
1457      "context_window": 2000000,
1458      "default_max_tokens": 4096,
1459      "can_reason": false,
1460      "supports_attachments": true
1461    },
1462    {
1463      "id": "cohere/command-r-plus",
1464      "model": "Cohere: Command R+",
1465      "cost_per_1m_in": 3,
1466      "cost_per_1m_out": 15,
1467      "cost_per_1m_in_cached": 0,
1468      "cost_per_1m_out_cached": 0,
1469      "context_window": 128000,
1470      "default_max_tokens": 2000,
1471      "can_reason": false,
1472      "supports_attachments": false
1473    },
1474    {
1475      "id": "cohere/command-r-plus-04-2024",
1476      "model": "Cohere: Command R+ (04-2024)",
1477      "cost_per_1m_in": 3,
1478      "cost_per_1m_out": 15,
1479      "cost_per_1m_in_cached": 0,
1480      "cost_per_1m_out_cached": 0,
1481      "context_window": 128000,
1482      "default_max_tokens": 2000,
1483      "can_reason": false,
1484      "supports_attachments": false
1485    },
1486    {
1487      "id": "cohere/command-r",
1488      "model": "Cohere: Command R",
1489      "cost_per_1m_in": 0.5,
1490      "cost_per_1m_out": 1.5,
1491      "cost_per_1m_in_cached": 0,
1492      "cost_per_1m_out_cached": 0,
1493      "context_window": 128000,
1494      "default_max_tokens": 2000,
1495      "can_reason": false,
1496      "supports_attachments": false
1497    },
1498    {
1499      "id": "anthropic/claude-3-haiku:beta",
1500      "model": "Anthropic: Claude 3 Haiku (self-moderated)",
1501      "cost_per_1m_in": 0.25,
1502      "cost_per_1m_out": 1.25,
1503      "cost_per_1m_in_cached": 0.3,
1504      "cost_per_1m_out_cached": 0.03,
1505      "context_window": 200000,
1506      "default_max_tokens": 2048,
1507      "can_reason": false,
1508      "supports_attachments": true
1509    },
1510    {
1511      "id": "anthropic/claude-3-haiku",
1512      "model": "Anthropic: Claude 3 Haiku",
1513      "cost_per_1m_in": 0.25,
1514      "cost_per_1m_out": 1.25,
1515      "cost_per_1m_in_cached": 0.3,
1516      "cost_per_1m_out_cached": 0.03,
1517      "context_window": 200000,
1518      "default_max_tokens": 2048,
1519      "can_reason": false,
1520      "supports_attachments": true
1521    },
1522    {
1523      "id": "anthropic/claude-3-opus:beta",
1524      "model": "Anthropic: Claude 3 Opus (self-moderated)",
1525      "cost_per_1m_in": 15,
1526      "cost_per_1m_out": 75,
1527      "cost_per_1m_in_cached": 18.75,
1528      "cost_per_1m_out_cached": 1.5,
1529      "context_window": 200000,
1530      "default_max_tokens": 2048,
1531      "can_reason": false,
1532      "supports_attachments": true
1533    },
1534    {
1535      "id": "anthropic/claude-3-opus",
1536      "model": "Anthropic: Claude 3 Opus",
1537      "cost_per_1m_in": 15,
1538      "cost_per_1m_out": 75,
1539      "cost_per_1m_in_cached": 18.75,
1540      "cost_per_1m_out_cached": 1.5,
1541      "context_window": 200000,
1542      "default_max_tokens": 2048,
1543      "can_reason": false,
1544      "supports_attachments": true
1545    },
1546    {
1547      "id": "anthropic/claude-3-sonnet:beta",
1548      "model": "Anthropic: Claude 3 Sonnet (self-moderated)",
1549      "cost_per_1m_in": 3,
1550      "cost_per_1m_out": 15,
1551      "cost_per_1m_in_cached": 3.75,
1552      "cost_per_1m_out_cached": 0.3,
1553      "context_window": 200000,
1554      "default_max_tokens": 2048,
1555      "can_reason": false,
1556      "supports_attachments": true
1557    },
1558    {
1559      "id": "anthropic/claude-3-sonnet",
1560      "model": "Anthropic: Claude 3 Sonnet",
1561      "cost_per_1m_in": 3,
1562      "cost_per_1m_out": 15,
1563      "cost_per_1m_in_cached": 3.75,
1564      "cost_per_1m_out_cached": 0.3,
1565      "context_window": 200000,
1566      "default_max_tokens": 2048,
1567      "can_reason": false,
1568      "supports_attachments": true
1569    },
1570    {
1571      "id": "cohere/command-r-03-2024",
1572      "model": "Cohere: Command R (03-2024)",
1573      "cost_per_1m_in": 0.5,
1574      "cost_per_1m_out": 1.5,
1575      "cost_per_1m_in_cached": 0,
1576      "cost_per_1m_out_cached": 0,
1577      "context_window": 128000,
1578      "default_max_tokens": 2000,
1579      "can_reason": false,
1580      "supports_attachments": false
1581    },
1582    {
1583      "id": "mistralai/mistral-large",
1584      "model": "Mistral Large",
1585      "cost_per_1m_in": 2,
1586      "cost_per_1m_out": 6,
1587      "cost_per_1m_in_cached": 0,
1588      "cost_per_1m_out_cached": 0,
1589      "context_window": 128000,
1590      "default_max_tokens": 12800,
1591      "can_reason": false,
1592      "supports_attachments": false
1593    },
1594    {
1595      "id": "openai/gpt-3.5-turbo-0613",
1596      "model": "OpenAI: GPT-3.5 Turbo (older v0613)",
1597      "cost_per_1m_in": 1,
1598      "cost_per_1m_out": 2,
1599      "cost_per_1m_in_cached": 0,
1600      "cost_per_1m_out_cached": 0,
1601      "context_window": 4095,
1602      "default_max_tokens": 2048,
1603      "can_reason": false,
1604      "supports_attachments": false
1605    },
1606    {
1607      "id": "openai/gpt-4-turbo-preview",
1608      "model": "OpenAI: GPT-4 Turbo Preview",
1609      "cost_per_1m_in": 10,
1610      "cost_per_1m_out": 30,
1611      "cost_per_1m_in_cached": 0,
1612      "cost_per_1m_out_cached": 0,
1613      "context_window": 128000,
1614      "default_max_tokens": 2048,
1615      "can_reason": false,
1616      "supports_attachments": false
1617    },
1618    {
1619      "id": "mistralai/mistral-small",
1620      "model": "Mistral Small",
1621      "cost_per_1m_in": 0.19999999999999998,
1622      "cost_per_1m_out": 0.6,
1623      "cost_per_1m_in_cached": 0,
1624      "cost_per_1m_out_cached": 0,
1625      "context_window": 32768,
1626      "default_max_tokens": 3276,
1627      "can_reason": false,
1628      "supports_attachments": false
1629    },
1630    {
1631      "id": "mistralai/mistral-tiny",
1632      "model": "Mistral Tiny",
1633      "cost_per_1m_in": 0.25,
1634      "cost_per_1m_out": 0.25,
1635      "cost_per_1m_in_cached": 0,
1636      "cost_per_1m_out_cached": 0,
1637      "context_window": 32768,
1638      "default_max_tokens": 3276,
1639      "can_reason": false,
1640      "supports_attachments": false
1641    },
1642    {
1643      "id": "mistralai/mixtral-8x7b-instruct",
1644      "model": "Mistral: Mixtral 8x7B Instruct",
1645      "cost_per_1m_in": 0.08,
1646      "cost_per_1m_out": 0.24,
1647      "cost_per_1m_in_cached": 0,
1648      "cost_per_1m_out_cached": 0,
1649      "context_window": 32768,
1650      "default_max_tokens": 8192,
1651      "can_reason": false,
1652      "supports_attachments": false
1653    },
1654    {
1655      "id": "openai/gpt-4-1106-preview",
1656      "model": "OpenAI: GPT-4 Turbo (older v1106)",
1657      "cost_per_1m_in": 10,
1658      "cost_per_1m_out": 30,
1659      "cost_per_1m_in_cached": 0,
1660      "cost_per_1m_out_cached": 0,
1661      "context_window": 128000,
1662      "default_max_tokens": 2048,
1663      "can_reason": false,
1664      "supports_attachments": false
1665    },
1666    {
1667      "id": "mistralai/mistral-7b-instruct-v0.1",
1668      "model": "Mistral: Mistral 7B Instruct v0.1",
1669      "cost_per_1m_in": 0.11,
1670      "cost_per_1m_out": 0.19,
1671      "cost_per_1m_in_cached": 0,
1672      "cost_per_1m_out_cached": 0,
1673      "context_window": 2824,
1674      "default_max_tokens": 282,
1675      "can_reason": false,
1676      "supports_attachments": false
1677    },
1678    {
1679      "id": "openai/gpt-3.5-turbo-16k",
1680      "model": "OpenAI: GPT-3.5 Turbo 16k",
1681      "cost_per_1m_in": 3,
1682      "cost_per_1m_out": 4,
1683      "cost_per_1m_in_cached": 0,
1684      "cost_per_1m_out_cached": 0,
1685      "context_window": 16385,
1686      "default_max_tokens": 2048,
1687      "can_reason": false,
1688      "supports_attachments": false
1689    },
1690    {
1691      "id": "openai/gpt-4",
1692      "model": "OpenAI: GPT-4",
1693      "cost_per_1m_in": 30,
1694      "cost_per_1m_out": 60,
1695      "cost_per_1m_in_cached": 0,
1696      "cost_per_1m_out_cached": 0,
1697      "context_window": 8191,
1698      "default_max_tokens": 2048,
1699      "can_reason": false,
1700      "supports_attachments": false
1701    },
1702    {
1703      "id": "openai/gpt-4-0314",
1704      "model": "OpenAI: GPT-4 (older v0314)",
1705      "cost_per_1m_in": 30,
1706      "cost_per_1m_out": 60,
1707      "cost_per_1m_in_cached": 0,
1708      "cost_per_1m_out_cached": 0,
1709      "context_window": 8191,
1710      "default_max_tokens": 2048,
1711      "can_reason": false,
1712      "supports_attachments": false
1713    }
1714  ]
1715}