chore: add more openrouter models

Created by Kujtim Hoxha

Change summary

internal/providers/configs/openrouter.json | 368 +++++++++++++++++------
1 file changed, 275 insertions(+), 93 deletions(-)
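Each entry in the "models" array follows the same shape: per-million-token pricing, context window, default max tokens, and capability flags, and this change also adds a has_reasoning_efforts flag to every entry. Below is a minimal Go sketch of how one of these entries could be decoded; the struct and field names are assumptions inferred from the JSON keys visible in the diff, not the repository's actual provider types.

    package main

    import (
        "encoding/json"
        "fmt"
        "os"
    )

    // Model mirrors one entry of the "models" array. Field names are
    // assumed from the JSON keys in the diff, not taken from the repo.
    type Model struct {
        ID                  string  `json:"id"`
        Model               string  `json:"model"`
        CostPer1MIn         float64 `json:"cost_per_1m_in"`
        CostPer1MOut        float64 `json:"cost_per_1m_out"`
        CostPer1MInCached   float64 `json:"cost_per_1m_in_cached"`
        CostPer1MOutCached  float64 `json:"cost_per_1m_out_cached"`
        ContextWindow       int64   `json:"context_window"`
        DefaultMaxTokens    int64   `json:"default_max_tokens"`
        CanReason           bool    `json:"can_reason"`
        HasReasoningEfforts bool    `json:"has_reasoning_efforts"`
        SupportsAttachments bool    `json:"supports_attachments"`
    }

    // Provider mirrors the top-level object of openrouter.json.
    type Provider struct {
        APIEndpoint         string  `json:"api_endpoint"`
        Type                string  `json:"type"`
        DefaultLargeModelID string  `json:"default_large_model_id"`
        DefaultSmallModelID string  `json:"default_small_model_id"`
        Models              []Model `json:"models"`
    }

    func main() {
        data, err := os.ReadFile("internal/providers/configs/openrouter.json")
        if err != nil {
            panic(err)
        }
        var p Provider
        if err := json.Unmarshal(data, &p); err != nil {
            panic(err)
        }
        fmt.Printf("%d models, default small model: %s\n", len(p.Models), p.DefaultSmallModelID)
    }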

Detailed changes

internal/providers/configs/openrouter.json

@@ -5,8 +5,86 @@
   "api_endpoint": "https://openrouter.ai/api/v1",
   "type": "openai",
   "default_large_model_id": "anthropic/claude-sonnet-4",
-  "default_small_model_id": "anthropic/claude-3.5-haiku",
+  "default_small_model_id": "anthropic/claude-haiku-3.5",
   "models": [
+    {
+      "id": "moonshotai/kimi-k2:free",
+      "model": "MoonshotAI: Kimi K2 (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 65536,
+      "default_max_tokens": 6553,
+      "can_reason": false,
+      "has_reasoning_efforts": false,
+      "supports_attachments": false
+    },
+    {
+      "id": "moonshotai/kimi-k2",
+      "model": "MoonshotAI: Kimi K2",
+      "cost_per_1m_in": 0.14,
+      "cost_per_1m_out": 2.4899999999999998,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 63000,
+      "default_max_tokens": 31500,
+      "can_reason": false,
+      "has_reasoning_efforts": false,
+      "supports_attachments": false
+    },
+    {
+      "id": "mistralai/devstral-medium",
+      "model": "Mistral: Devstral Medium",
+      "cost_per_1m_in": 0.39999999999999997,
+      "cost_per_1m_out": 2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131072,
+      "default_max_tokens": 13107,
+      "can_reason": false,
+      "has_reasoning_efforts": false,
+      "supports_attachments": false
+    },
+    {
+      "id": "mistralai/devstral-small",
+      "model": "Mistral: Devstral Small 1.1",
+      "cost_per_1m_in": 0.09,
+      "cost_per_1m_out": 0.3,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131072,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "has_reasoning_efforts": false,
+      "supports_attachments": false
+    },
+    {
+      "id": "x-ai/grok-4",
+      "model": "xAI: Grok 4",
+      "cost_per_1m_in": 3,
+      "cost_per_1m_out": 15,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.75,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": true,
+      "has_reasoning_efforts": false,
+      "supports_attachments": true
+    },
+    {
+      "id": "openrouter/cypher-alpha:free",
+      "model": "Cypher Alpha (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1000000,
+      "default_max_tokens": 5000,
+      "can_reason": true,
+      "has_reasoning_efforts": false,
+      "supports_attachments": false
+    },
     {
       "id": "mistralai/mistral-small-3.2-24b-instruct:free",
       "model": "Mistral: Mistral Small 3.2 24B (free)",
@@ -17,6 +95,7 @@
       "context_window": 96000,
       "default_max_tokens": 9600,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -29,20 +108,9 @@
       "context_window": 128000,
       "default_max_tokens": 12800,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
-    {
-      "id": "minimax/minimax-m1:extended",
-      "model": "MiniMax: MiniMax M1 (extended)",
-      "cost_per_1m_in": 0,
-      "cost_per_1m_out": 0,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 256000,
-      "default_max_tokens": 25600,
-      "can_reason": true,
-      "supports_attachments": false
-    },
     {
       "id": "minimax/minimax-m1",
       "model": "MiniMax: MiniMax M1",
@@ -53,6 +121,7 @@
       "context_window": 1000000,
       "default_max_tokens": 20000,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -60,11 +129,12 @@
       "model": "Google: Gemini 2.5 Flash Lite Preview 06-17",
       "cost_per_1m_in": 0.09999999999999999,
       "cost_per_1m_out": 0.39999999999999997,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_in_cached": 0.18330000000000002,
+      "cost_per_1m_out_cached": 0.024999999999999998,
       "context_window": 1048576,
       "default_max_tokens": 32767,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -77,6 +147,7 @@
       "context_window": 1048576,
       "default_max_tokens": 32767,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -89,6 +160,7 @@
       "context_window": 1048576,
       "default_max_tokens": 32768,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -101,6 +173,7 @@
       "context_window": 200000,
       "default_max_tokens": 50000,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -113,6 +186,7 @@
       "context_window": 131072,
       "default_max_tokens": 13107,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -125,18 +199,20 @@
       "context_window": 131072,
       "default_max_tokens": 13107,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
       "id": "mistralai/magistral-small-2506",
       "model": "Mistral: Magistral Small 2506",
-      "cost_per_1m_in": 0.5,
-      "cost_per_1m_out": 1.5,
+      "cost_per_1m_in": 0.09999999999999999,
+      "cost_per_1m_out": 0.3,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 40000,
-      "default_max_tokens": 20000,
+      "context_window": 40960,
+      "default_max_tokens": 20480,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -149,6 +225,7 @@
       "context_window": 40960,
       "default_max_tokens": 20000,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -161,6 +238,7 @@
       "context_window": 40960,
       "default_max_tokens": 20000,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -173,18 +251,20 @@
       "context_window": 1048576,
       "default_max_tokens": 32768,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
       "id": "deepseek/deepseek-r1-0528",
       "model": "DeepSeek: R1 0528",
       "cost_per_1m_in": 0.5,
-      "cost_per_1m_out": 2.1500000000000004,
+      "cost_per_1m_out": 2,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 128000,
-      "default_max_tokens": 16384,
+      "context_window": 163840,
+      "default_max_tokens": 81920,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -197,6 +277,7 @@
       "context_window": 200000,
       "default_max_tokens": 16000,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -209,23 +290,25 @@
       "context_window": 200000,
       "default_max_tokens": 32000,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
-      "id": "mistralai/devstral-small:free",
-      "model": "Mistral: Devstral Small (free)",
+      "id": "mistralai/devstral-small-2505:free",
+      "model": "Mistral: Devstral Small 2505 (free)",
       "cost_per_1m_in": 0,
       "cost_per_1m_out": 0,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 131072,
-      "default_max_tokens": 13107,
+      "context_window": 32768,
+      "default_max_tokens": 3276,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
-      "id": "mistralai/devstral-small",
-      "model": "Mistral: Devstral Small",
+      "id": "mistralai/devstral-small-2505",
+      "model": "Mistral: Devstral Small 2505",
       "cost_per_1m_in": 0.06,
       "cost_per_1m_out": 0.12,
       "cost_per_1m_in_cached": 0,
@@ -233,6 +316,7 @@
       "context_window": 128000,
       "default_max_tokens": 12800,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -245,6 +329,7 @@
       "context_window": 1048576,
       "default_max_tokens": 32767,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -257,6 +342,7 @@
       "context_window": 1048576,
       "default_max_tokens": 32767,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -269,6 +355,7 @@
       "context_window": 200000,
       "default_max_tokens": 50000,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -278,9 +365,10 @@
       "cost_per_1m_out": 2,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 32768,
-      "default_max_tokens": 3276,
+      "context_window": 131072,
+      "default_max_tokens": 13107,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -292,7 +380,8 @@
       "cost_per_1m_out_cached": 0.31,
       "context_window": 1048576,
       "default_max_tokens": 32767,
-      "can_reason": false,
+      "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -305,6 +394,7 @@
       "context_window": 32768,
       "default_max_tokens": 3276,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -317,6 +407,7 @@
       "context_window": 131072,
       "default_max_tokens": 32000,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -329,6 +420,20 @@
       "context_window": 131072,
       "default_max_tokens": 16384,
       "can_reason": false,
+      "has_reasoning_efforts": false,
+      "supports_attachments": false
+    },
+    {
+      "id": "qwen/qwen3-4b:free",
+      "model": "Qwen: Qwen3 4B (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 40960,
+      "default_max_tokens": 4096,
+      "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -341,6 +446,7 @@
       "context_window": 40960,
       "default_max_tokens": 20480,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -353,6 +459,7 @@
       "context_window": 40960,
       "default_max_tokens": 20480,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -365,6 +472,20 @@
       "context_window": 40960,
       "default_max_tokens": 4096,
       "can_reason": true,
+      "has_reasoning_efforts": false,
+      "supports_attachments": false
+    },
+    {
+      "id": "qwen/qwen3-235b-a22b:free",
+      "model": "Qwen: Qwen3 235B A22B (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131072,
+      "default_max_tokens": 13107,
+      "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -377,6 +498,7 @@
       "context_window": 40960,
       "default_max_tokens": 20480,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -388,7 +510,8 @@
       "cost_per_1m_out_cached": 0.0375,
       "context_window": 1048576,
       "default_max_tokens": 32767,
-      "can_reason": false,
+      "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -400,7 +523,8 @@
       "cost_per_1m_out_cached": 0.0375,
       "context_window": 1048576,
       "default_max_tokens": 32767,
-      "can_reason": false,
+      "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -413,6 +537,7 @@
       "context_window": 200000,
       "default_max_tokens": 50000,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -425,6 +550,7 @@
       "context_window": 200000,
       "default_max_tokens": 50000,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -437,6 +563,7 @@
       "context_window": 200000,
       "default_max_tokens": 50000,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -449,6 +576,7 @@
       "context_window": 1047576,
       "default_max_tokens": 16384,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -461,6 +589,7 @@
       "context_window": 1047576,
       "default_max_tokens": 16384,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -473,6 +602,7 @@
       "context_window": 1047576,
       "default_max_tokens": 16384,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -485,6 +615,7 @@
       "context_window": 131072,
       "default_max_tokens": 13107,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -497,6 +628,7 @@
       "context_window": 131072,
       "default_max_tokens": 13107,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -509,6 +641,7 @@
       "context_window": 1048576,
       "default_max_tokens": 8192,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -521,20 +654,9 @@
       "context_window": 1048576,
       "default_max_tokens": 524288,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
-    {
-      "id": "all-hands/openhands-lm-32b-v0.1",
-      "model": "OpenHands LM 32B V0.1",
-      "cost_per_1m_in": 2.6,
-      "cost_per_1m_out": 3.4,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 16384,
-      "default_max_tokens": 2048,
-      "can_reason": false,
-      "supports_attachments": false
-    },
     {
       "id": "google/gemini-2.5-pro-exp-03-25",
       "model": "Google: Gemini 2.5 Pro Experimental",
@@ -545,6 +667,7 @@
       "context_window": 1048576,
       "default_max_tokens": 32767,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -554,21 +677,23 @@
       "cost_per_1m_out": 0,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 163840,
-      "default_max_tokens": 16384,
+      "context_window": 16384,
+      "default_max_tokens": 8192,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
       "id": "deepseek/deepseek-chat-v3-0324",
       "model": "DeepSeek: DeepSeek V3 0324",
-      "cost_per_1m_in": 0.28,
-      "cost_per_1m_out": 0.88,
+      "cost_per_1m_in": 0.25,
+      "cost_per_1m_out": 0.85,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 163840,
-      "default_max_tokens": 16384,
+      "default_max_tokens": 81920,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -578,9 +703,10 @@
       "cost_per_1m_out": 0,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 96000,
-      "default_max_tokens": 48000,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -593,6 +719,7 @@
       "context_window": 128000,
       "default_max_tokens": 12800,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -605,6 +732,7 @@
       "context_window": 256000,
       "default_max_tokens": 2048,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -617,6 +745,7 @@
       "context_window": 256000,
       "default_max_tokens": 2048,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -629,6 +758,7 @@
       "context_window": 128000,
       "default_max_tokens": 8192,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -641,6 +771,7 @@
       "context_window": 1048576,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -653,23 +784,25 @@
       "context_window": 200000,
       "default_max_tokens": 32000,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
-      "id": "anthropic/claude-3.7-sonnet:beta",
-      "model": "Anthropic: Claude 3.7 Sonnet (self-moderated)",
+      "id": "anthropic/claude-3.7-sonnet:thinking",
+      "model": "Anthropic: Claude 3.7 Sonnet (thinking)",
       "cost_per_1m_in": 3,
       "cost_per_1m_out": 15,
       "cost_per_1m_in_cached": 3.75,
       "cost_per_1m_out_cached": 0.3,
       "context_window": 200000,
-      "default_max_tokens": 64000,
+      "default_max_tokens": 32000,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
-      "id": "anthropic/claude-3.7-sonnet:thinking",
-      "model": "Anthropic: Claude 3.7 Sonnet (thinking)",
+      "id": "anthropic/claude-3.7-sonnet:beta",
+      "model": "Anthropic: Claude 3.7 Sonnet (self-moderated)",
       "cost_per_1m_in": 3,
       "cost_per_1m_out": 15,
       "cost_per_1m_in_cached": 3.75,
@@ -677,6 +810,7 @@
       "context_window": 200000,
       "default_max_tokens": 64000,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -689,6 +823,7 @@
       "context_window": 32768,
       "default_max_tokens": 3276,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -701,6 +836,7 @@
       "context_window": 200000,
       "default_max_tokens": 50000,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -713,6 +849,7 @@
       "context_window": 1048576,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -725,6 +862,7 @@
       "context_window": 1000000,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -737,6 +875,7 @@
       "context_window": 131072,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -749,6 +888,7 @@
       "context_window": 32768,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -761,18 +901,20 @@
       "context_window": 200000,
       "default_max_tokens": 50000,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
       "id": "mistralai/mistral-small-24b-instruct-2501",
       "model": "Mistral: Mistral Small 3",
       "cost_per_1m_in": 0.049999999999999996,
-      "cost_per_1m_out": 0.09,
+      "cost_per_1m_out": 0.08,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 32768,
       "default_max_tokens": 16384,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -785,18 +927,20 @@
       "context_window": 131072,
       "default_max_tokens": 8192,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
       "id": "deepseek/deepseek-r1",
       "model": "DeepSeek: R1",
-      "cost_per_1m_in": 0.44999999999999996,
-      "cost_per_1m_out": 2.1500000000000004,
+      "cost_per_1m_in": 0.39999999999999997,
+      "cost_per_1m_out": 2,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 128000,
-      "default_max_tokens": 16384,
+      "context_window": 163840,
+      "default_max_tokens": 81920,
       "can_reason": true,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -809,18 +953,20 @@
       "context_window": 262144,
       "default_max_tokens": 26214,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
       "id": "deepseek/deepseek-chat",
       "model": "DeepSeek: DeepSeek V3",
-      "cost_per_1m_in": 0.38,
-      "cost_per_1m_out": 0.8899999999999999,
+      "cost_per_1m_in": 0.3,
+      "cost_per_1m_out": 0.85,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 163840,
       "default_max_tokens": 81920,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -833,6 +979,7 @@
       "context_window": 200000,
       "default_max_tokens": 50000,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -845,18 +992,46 @@
       "context_window": 131072,
       "default_max_tokens": 13107,
       "can_reason": false,
+      "has_reasoning_efforts": false,
+      "supports_attachments": false
+    },
+    {
+      "id": "google/gemini-2.0-flash-exp:free",
+      "model": "Google: Gemini 2.0 Flash Experimental (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1048576,
+      "default_max_tokens": 4096,
+      "can_reason": false,
+      "has_reasoning_efforts": false,
+      "supports_attachments": true
+    },
+    {
+      "id": "meta-llama/llama-3.3-70b-instruct:free",
+      "model": "Meta: Llama 3.3 70B Instruct (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 65536,
+      "default_max_tokens": 6553,
+      "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
       "id": "meta-llama/llama-3.3-70b-instruct",
       "model": "Meta: Llama 3.3 70B Instruct",
-      "cost_per_1m_in": 0.049999999999999996,
-      "cost_per_1m_out": 0.16999999999999998,
+      "cost_per_1m_in": 0.038000000000000006,
+      "cost_per_1m_out": 0.12,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 131000,
-      "default_max_tokens": 65500,
+      "context_window": 131072,
+      "default_max_tokens": 8192,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -869,6 +1044,7 @@
       "context_window": 300000,
       "default_max_tokens": 2560,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -881,6 +1057,7 @@
       "context_window": 128000,
       "default_max_tokens": 2560,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -893,6 +1070,7 @@
       "context_window": 300000,
       "default_max_tokens": 2560,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -905,6 +1083,7 @@
       "context_window": 128000,
       "default_max_tokens": 8192,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -917,6 +1096,7 @@
       "context_window": 131072,
       "default_max_tokens": 13107,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -929,6 +1109,7 @@
       "context_window": 131072,
       "default_max_tokens": 13107,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -938,9 +1119,10 @@
       "cost_per_1m_out": 6,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 32768,
-      "default_max_tokens": 3276,
+      "context_window": 131072,
+      "default_max_tokens": 13107,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -953,6 +1135,7 @@
       "context_window": 32768,
       "default_max_tokens": 3276,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -965,6 +1148,7 @@
       "context_window": 200000,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -977,6 +1161,7 @@
       "context_window": 200000,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -989,6 +1174,7 @@
       "context_window": 200000,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -1001,6 +1187,7 @@
       "context_window": 200000,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -1013,6 +1200,7 @@
       "context_window": 200000,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
@@ -1025,18 +1213,20 @@
       "context_window": 200000,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
-      "id": "x-ai/grok-beta",
-      "model": "xAI: Grok Beta",
-      "cost_per_1m_in": 5,
-      "cost_per_1m_out": 15,
+      "id": "mistralai/ministral-3b",
+      "model": "Mistral: Ministral 3B",
+      "cost_per_1m_in": 0.04,
+      "cost_per_1m_out": 0.04,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 131072,
       "default_max_tokens": 13107,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -1049,18 +1239,7 @@
       "context_window": 128000,
       "default_max_tokens": 12800,
       "can_reason": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "mistralai/ministral-3b",
-      "model": "Mistral: Ministral 3B",
-      "cost_per_1m_in": 0.04,
-      "cost_per_1m_out": 0.04,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131072,
-      "default_max_tokens": 13107,
-      "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -1073,6 +1252,7 @@
       "context_window": 131072,
       "default_max_tokens": 65536,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
@@ -1085,19 +1265,21 @@
       "context_window": 1000000,
       "default_max_tokens": 4096,
       "can_reason": false,
+      "has_reasoning_efforts": false,
       "supports_attachments": true
     },
     {
-      "id": "meta-llama/llama-3.2-11b-vision-instruct",
-      "model": "Meta: Llama 3.2 11B Vision Instruct",
-      "cost_per_1m_in": 0.049,
-      "cost_per_1m_out": 0.049,
+      "id": "thedrummer/rocinante-12b",
+      "model": "TheDrummer: Rocinante 12B",
+      "cost_per_1m_in": 0.19999999999999998,
+      "cost_per_1m_out": 0.5,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 131072,
-      "default_max_tokens": 8192,
+      "context_window": 32768,
+      "default_max_tokens": 3276,
       "can_reason": false,
-      "supports_attachments": true
+      "has_reasoning_efforts": false,
+      "supports_attachments": false
     },
     {
       "id": "meta-llama/llama-3.2-3b-instruct",