@@ -5,7 +5,7 @@
"api_endpoint": "https://openrouter.ai/api/v1",
"type": "openai",
"default_large_model_id": "anthropic/claude-sonnet-4",
- "default_small_model_id": "anthropic/claude-haiku-3.5",
+ "default_small_model_id": "anthropic/claude-3.5-haiku",
"models": [
{
"id": "moonshotai/kimi-k2:free",
@@ -72,19 +72,6 @@
"has_reasoning_efforts": false,
"supports_attachments": true
},
- {
- "id": "openrouter/cypher-alpha:free",
- "model": "Cypher Alpha (free)",
- "cost_per_1m_in": 0,
- "cost_per_1m_out": 0,
- "cost_per_1m_in_cached": 0,
- "cost_per_1m_out_cached": 0,
- "context_window": 1000000,
- "default_max_tokens": 5000,
- "can_reason": true,
- "has_reasoning_efforts": false,
- "supports_attachments": false
- },
{
"id": "mistralai/mistral-small-3.2-24b-instruct:free",
"model": "Mistral: Mistral Small 3.2 24B (free)",
@@ -748,19 +735,6 @@
"has_reasoning_efforts": false,
"supports_attachments": false
},
- {
- "id": "openai/gpt-4.5-preview",
- "model": "OpenAI: GPT-4.5 (Preview)",
- "cost_per_1m_in": 75,
- "cost_per_1m_out": 150,
- "cost_per_1m_in_cached": 0,
- "cost_per_1m_out_cached": 37.5,
- "context_window": 128000,
- "default_max_tokens": 8192,
- "can_reason": false,
- "has_reasoning_efforts": false,
- "supports_attachments": true
- },
{
"id": "google/gemini-2.0-flash-lite-001",
"model": "Google: Gemini 2.0 Flash Lite",
@@ -1217,27 +1191,27 @@
"supports_attachments": true
},
{
- "id": "mistralai/ministral-3b",
- "model": "Mistral: Ministral 3B",
- "cost_per_1m_in": 0.04,
- "cost_per_1m_out": 0.04,
+ "id": "mistralai/ministral-8b",
+ "model": "Mistral: Ministral 8B",
+ "cost_per_1m_in": 0.1,
+ "cost_per_1m_out": 0.1,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 131072,
- "default_max_tokens": 13107,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
},
{
- "id": "mistralai/ministral-8b",
- "model": "Mistral: Ministral 8B",
- "cost_per_1m_in": 0.09999999999999999,
- "cost_per_1m_out": 0.09999999999999999,
+ "id": "mistralai/ministral-3b",
+ "model": "Mistral: Ministral 3B",
+ "cost_per_1m_in": 0.04,
+ "cost_per_1m_out": 0.04,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 128000,
- "default_max_tokens": 12800,
+ "context_window": 131072,
+ "default_max_tokens": 13107,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
@@ -1412,26 +1386,26 @@
"supports_attachments": false
},
{
- "id": "meta-llama/llama-3.1-70b-instruct",
- "model": "Meta: Llama 3.1 70B Instruct",
- "cost_per_1m_in": 0.09999999999999999,
- "cost_per_1m_out": 0.28,
+ "id": "meta-llama/llama-3.1-405b-instruct",
+ "model": "Meta: Llama 3.1 405B Instruct",
+ "cost_per_1m_in": 0.8,
+ "cost_per_1m_out": 0.8,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 131072,
+ "context_window": 32768,
"default_max_tokens": 8192,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
},
{
- "id": "meta-llama/llama-3.1-405b-instruct",
- "model": "Meta: Llama 3.1 405B Instruct",
- "cost_per_1m_in": 0.7999999999999999,
- "cost_per_1m_out": 0.7999999999999999,
+ "id": "meta-llama/llama-3.1-70b-instruct",
+ "model": "Meta: Llama 3.1 70B Instruct",
+ "cost_per_1m_in": 0.1,
+ "cost_per_1m_out": 0.28,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 32768,
+ "context_window": 131072,
"default_max_tokens": 8192,
"can_reason": false,
"has_reasoning_efforts": false,
@@ -1440,19 +1414,19 @@
{
"id": "mistralai/mistral-nemo",
"model": "Mistral: Mistral Nemo",
- "cost_per_1m_in": 0.008,
- "cost_per_1m_out": 0.001,
+ "cost_per_1m_in": 0.0075,
+ "cost_per_1m_out": 0.05,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 131072,
- "default_max_tokens": 65536,
+ "context_window": 32000,
+ "default_max_tokens": 3200,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
},
{
- "id": "openai/gpt-4o-mini-2024-07-18",
- "model": "OpenAI: GPT-4o-mini (2024-07-18)",
+ "id": "openai/gpt-4o-mini",
+ "model": "OpenAI: GPT-4o-mini",
"cost_per_1m_in": 0.15,
"cost_per_1m_out": 0.6,
"cost_per_1m_in_cached": 0,
@@ -1464,8 +1438,8 @@
"supports_attachments": true
},
{
- "id": "openai/gpt-4o-mini",
- "model": "OpenAI: GPT-4o-mini",
+ "id": "openai/gpt-4o-mini-2024-07-18",
+ "model": "OpenAI: GPT-4o-mini (2024-07-18)",
"cost_per_1m_in": 0.15,
"cost_per_1m_out": 0.6,
"cost_per_1m_in_cached": 0,
@@ -1503,10 +1477,10 @@
"supports_attachments": true
},
{
- "id": "mistralai/mistral-7b-instruct-v0.3",
- "model": "Mistral: Mistral 7B Instruct v0.3",
- "cost_per_1m_in": 0.028,
- "cost_per_1m_out": 0.054,
+ "id": "mistralai/mistral-7b-instruct:free",
+ "model": "Mistral: Mistral 7B Instruct (free)",
+ "cost_per_1m_in": 0,
+ "cost_per_1m_out": 0,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
"context_window": 32768,
@@ -1516,10 +1490,10 @@
"supports_attachments": false
},
{
- "id": "mistralai/mistral-7b-instruct:free",
- "model": "Mistral: Mistral 7B Instruct (free)",
- "cost_per_1m_in": 0,
- "cost_per_1m_out": 0,
+ "id": "mistralai/mistral-7b-instruct",
+ "model": "Mistral: Mistral 7B Instruct",
+ "cost_per_1m_in": 0.028,
+ "cost_per_1m_out": 0.054,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
"context_window": 32768,
@@ -1529,8 +1503,8 @@
"supports_attachments": false
},
{
- "id": "mistralai/mistral-7b-instruct",
- "model": "Mistral: Mistral 7B Instruct",
+ "id": "mistralai/mistral-7b-instruct-v0.3",
+ "model": "Mistral: Mistral 7B Instruct v0.3",
"cost_per_1m_in": 0.028,
"cost_per_1m_out": 0.054,
"cost_per_1m_in_cached": 0,
@@ -1580,19 +1554,6 @@
"has_reasoning_efforts": false,
"supports_attachments": true
},
- {
- "id": "openai/gpt-4o-2024-05-13",
- "model": "OpenAI: GPT-4o (2024-05-13)",
- "cost_per_1m_in": 5,
- "cost_per_1m_out": 15,
- "cost_per_1m_in_cached": 0,
- "cost_per_1m_out_cached": 0,
- "context_window": 128000,
- "default_max_tokens": 2048,
- "can_reason": false,
- "has_reasoning_efforts": false,
- "supports_attachments": true
- },
{
"id": "openai/gpt-4o",
"model": "OpenAI: GPT-4o",
@@ -1619,6 +1580,19 @@
"has_reasoning_efforts": false,
"supports_attachments": true
},
+ {
+ "id": "openai/gpt-4o-2024-05-13",
+ "model": "OpenAI: GPT-4o (2024-05-13)",
+ "cost_per_1m_in": 5,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 2048,
+ "can_reason": false,
+ "has_reasoning_efforts": false,
+ "supports_attachments": true
+ },
{
"id": "meta-llama/llama-3-8b-instruct",
"model": "Meta: Llama 3 8B Instruct",
@@ -1828,36 +1802,36 @@
"supports_attachments": false
},
{
- "id": "openai/gpt-4-turbo-preview",
- "model": "OpenAI: GPT-4 Turbo Preview",
- "cost_per_1m_in": 10,
- "cost_per_1m_out": 30,
+ "id": "openai/gpt-3.5-turbo-0613",
+ "model": "OpenAI: GPT-3.5 Turbo (older v0613)",
+ "cost_per_1m_in": 1,
+ "cost_per_1m_out": 2,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 128000,
+ "context_window": 4095,
"default_max_tokens": 2048,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
},
{
- "id": "openai/gpt-3.5-turbo-0613",
- "model": "OpenAI: GPT-3.5 Turbo (older v0613)",
- "cost_per_1m_in": 1,
- "cost_per_1m_out": 2,
+ "id": "openai/gpt-4-turbo-preview",
+ "model": "OpenAI: GPT-4 Turbo Preview",
+ "cost_per_1m_in": 10,
+ "cost_per_1m_out": 30,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 4095,
+ "context_window": 128000,
"default_max_tokens": 2048,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
},
{
- "id": "mistralai/mistral-tiny",
- "model": "Mistral Tiny",
- "cost_per_1m_in": 0.25,
- "cost_per_1m_out": 0.25,
+ "id": "mistralai/mistral-small",
+ "model": "Mistral Small",
+ "cost_per_1m_in": 0.2,
+ "cost_per_1m_out": 0.6,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
"context_window": 32768,
@@ -1867,10 +1841,10 @@
"supports_attachments": false
},
{
- "id": "mistralai/mistral-small",
- "model": "Mistral Small",
- "cost_per_1m_in": 0.19999999999999998,
- "cost_per_1m_out": 0.6,
+ "id": "mistralai/mistral-tiny",
+ "model": "Mistral Tiny",
+ "cost_per_1m_in": 0.25,
+ "cost_per_1m_out": 0.25,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
"context_window": 32768,