@@ -7,6 +7,45 @@
"default_large_model_id": "anthropic/claude-sonnet-4",
"default_small_model_id": "anthropic/claude-3.5-haiku",
"models": [
+ {
+ "id": "qwen/qwen3-coder",
+ "model": "Qwen: Qwen3 Coder ",
+ "cost_per_1m_in": 1,
+ "cost_per_1m_out": 5,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1000000,
+ "default_max_tokens": 32768,
+ "can_reason": false,
+ "has_reasoning_efforts": false,
+ "supports_attachments": false
+ },
+ {
+ "id": "google/gemini-2.5-flash-lite",
+ "model": "Google: Gemini 2.5 Flash Lite",
+ "cost_per_1m_in": 0.09999999999999999,
+ "cost_per_1m_out": 0.39999999999999997,
+ "cost_per_1m_in_cached": 0.18330000000000002,
+ "cost_per_1m_out_cached": 0.024999999999999998,
+ "context_window": 1048576,
+ "default_max_tokens": 32767,
+ "can_reason": true,
+ "has_reasoning_efforts": false,
+ "supports_attachments": true
+ },
+ {
+ "id": "qwen/qwen3-235b-a22b-07-25",
+ "model": "Qwen: Qwen3 235B A22B 2507",
+ "cost_per_1m_in": 0.12,
+ "cost_per_1m_out": 0.59,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262144,
+ "default_max_tokens": 131072,
+ "can_reason": false,
+ "has_reasoning_efforts": false,
+ "supports_attachments": false
+ },
{
"id": "moonshotai/kimi-k2:free",
"model": "MoonshotAI: Kimi K2 (free)",
@@ -49,12 +88,12 @@
{
"id": "mistralai/devstral-small",
"model": "Mistral: Devstral Small 1.1",
- "cost_per_1m_in": 0.09,
- "cost_per_1m_out": 0.3,
+ "cost_per_1m_in": 0.07,
+ "cost_per_1m_out": 0.28,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 131072,
- "default_max_tokens": 65536,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
@@ -192,12 +231,12 @@
{
"id": "mistralai/magistral-small-2506",
"model": "Mistral: Magistral Small 2506",
- "cost_per_1m_in": 0.09999999999999999,
- "cost_per_1m_out": 0.3,
+ "cost_per_1m_in": 0.5,
+ "cost_per_1m_out": 1.5,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 40960,
- "default_max_tokens": 20480,
+ "context_window": 40000,
+ "default_max_tokens": 20000,
"can_reason": true,
"has_reasoning_efforts": false,
"supports_attachments": false
@@ -244,12 +283,12 @@
{
"id": "deepseek/deepseek-r1-0528",
"model": "DeepSeek: R1 0528",
- "cost_per_1m_in": 0.5,
- "cost_per_1m_out": 2,
+ "cost_per_1m_in": 0.272,
+ "cost_per_1m_out": 0.272,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
"context_window": 163840,
- "default_max_tokens": 81920,
+ "default_max_tokens": 16384,
"can_reason": true,
"has_reasoning_efforts": false,
"supports_attachments": false
@@ -296,42 +335,16 @@
{
"id": "mistralai/devstral-small-2505",
"model": "Mistral: Devstral Small 2505",
- "cost_per_1m_in": 0.06,
- "cost_per_1m_out": 0.12,
+ "cost_per_1m_in": 0.03,
+ "cost_per_1m_out": 0.03,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 128000,
- "default_max_tokens": 12800,
+ "context_window": 32768,
+ "default_max_tokens": 3276,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
},
- {
- "id": "google/gemini-2.5-flash-preview-05-20",
- "model": "Google: Gemini 2.5 Flash Preview 05-20",
- "cost_per_1m_in": 0.15,
- "cost_per_1m_out": 0.6,
- "cost_per_1m_in_cached": 0.2333,
- "cost_per_1m_out_cached": 0.0375,
- "context_window": 1048576,
- "default_max_tokens": 32767,
- "can_reason": true,
- "has_reasoning_efforts": false,
- "supports_attachments": true
- },
- {
- "id": "google/gemini-2.5-flash-preview-05-20:thinking",
- "model": "Google: Gemini 2.5 Flash Preview 05-20 (thinking)",
- "cost_per_1m_in": 0.15,
- "cost_per_1m_out": 3.5,
- "cost_per_1m_in_cached": 0.2333,
- "cost_per_1m_out_cached": 0.0375,
- "context_window": 1048576,
- "default_max_tokens": 32767,
- "can_reason": true,
- "has_reasoning_efforts": false,
- "supports_attachments": true
- },
{
"id": "openai/codex-mini",
"model": "OpenAI: Codex Mini",
@@ -452,8 +465,8 @@
{
"id": "qwen/qwen3-32b",
"model": "Qwen: Qwen3 32B",
- "cost_per_1m_in": 0.09999999999999999,
- "cost_per_1m_out": 0.3,
+ "cost_per_1m_in": 0.027,
+ "cost_per_1m_out": 0.027,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
"context_window": 40960,
@@ -488,32 +501,6 @@
"has_reasoning_efforts": false,
"supports_attachments": false
},
- {
- "id": "google/gemini-2.5-flash-preview",
- "model": "Google: Gemini 2.5 Flash Preview 04-17",
- "cost_per_1m_in": 0.15,
- "cost_per_1m_out": 0.6,
- "cost_per_1m_in_cached": 0.2333,
- "cost_per_1m_out_cached": 0.0375,
- "context_window": 1048576,
- "default_max_tokens": 32767,
- "can_reason": true,
- "has_reasoning_efforts": false,
- "supports_attachments": true
- },
- {
- "id": "google/gemini-2.5-flash-preview:thinking",
- "model": "Google: Gemini 2.5 Flash Preview 04-17 (thinking)",
- "cost_per_1m_in": 0.15,
- "cost_per_1m_out": 3.5,
- "cost_per_1m_in_cached": 0.2333,
- "cost_per_1m_out_cached": 0.0375,
- "context_window": 1048576,
- "default_max_tokens": 32767,
- "can_reason": true,
- "has_reasoning_efforts": false,
- "supports_attachments": true
- },
{
"id": "openai/o4-mini-high",
"model": "OpenAI: o4 Mini High",
@@ -664,7 +651,7 @@
"cost_per_1m_out": 0,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 16384,
+ "context_window": 32768,
"default_max_tokens": 8192,
"can_reason": false,
"has_reasoning_efforts": false,
@@ -699,12 +686,12 @@
{
"id": "mistralai/mistral-small-3.1-24b-instruct",
"model": "Mistral: Mistral Small 3.1 24B",
- "cost_per_1m_in": 0.049999999999999996,
- "cost_per_1m_out": 0.09999999999999999,
+ "cost_per_1m_in": 0.027,
+ "cost_per_1m_out": 0.027,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 128000,
- "default_max_tokens": 12800,
+ "context_window": 96000,
+ "default_max_tokens": 48000,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": true
@@ -881,12 +868,12 @@
{
"id": "mistralai/mistral-small-24b-instruct-2501",
"model": "Mistral: Mistral Small 3",
- "cost_per_1m_in": 0.049999999999999996,
- "cost_per_1m_out": 0.08,
+ "cost_per_1m_in": 0.03,
+ "cost_per_1m_out": 0.03,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
"context_window": 32768,
- "default_max_tokens": 16384,
+ "default_max_tokens": 3276,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
@@ -894,12 +881,12 @@
{
"id": "deepseek/deepseek-r1-distill-llama-70b",
"model": "DeepSeek: R1 Distill Llama 70B",
- "cost_per_1m_in": 0.09999999999999999,
- "cost_per_1m_out": 0.39999999999999997,
+ "cost_per_1m_in": 0.049999999999999996,
+ "cost_per_1m_out": 0.049999999999999996,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
"context_window": 131072,
- "default_max_tokens": 8192,
+ "default_max_tokens": 13107,
"can_reason": true,
"has_reasoning_efforts": false,
"supports_attachments": false
@@ -933,12 +920,12 @@
{
"id": "deepseek/deepseek-chat",
"model": "DeepSeek: DeepSeek V3",
- "cost_per_1m_in": 0.3,
- "cost_per_1m_out": 0.85,
+ "cost_per_1m_in": 0.272,
+ "cost_per_1m_out": 0.272,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
"context_window": 163840,
- "default_max_tokens": 81920,
+ "default_max_tokens": 16384,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
@@ -1284,12 +1271,12 @@
{
"id": "qwen/qwen-2.5-72b-instruct",
"model": "Qwen2.5 72B Instruct",
- "cost_per_1m_in": 0.12,
- "cost_per_1m_out": 0.39,
+ "cost_per_1m_in": 0.101,
+ "cost_per_1m_out": 0.101,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
"context_window": 32768,
- "default_max_tokens": 8192,
+ "default_max_tokens": 3276,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
@@ -1379,8 +1366,8 @@
"cost_per_1m_out": 0.02,
"cost_per_1m_in_cached": 0,
"cost_per_1m_out_cached": 0,
- "context_window": 131000,
- "default_max_tokens": 65500,
+ "context_window": 131072,
+ "default_max_tokens": 8192,
"can_reason": false,
"has_reasoning_efforts": false,
"supports_attachments": false
@@ -1749,19 +1736,6 @@
"has_reasoning_efforts": false,
"supports_attachments": true
},
- {
- "id": "anthropic/claude-3-sonnet:beta",
- "model": "Anthropic: Claude 3 Sonnet (self-moderated)",
- "cost_per_1m_in": 3,
- "cost_per_1m_out": 15,
- "cost_per_1m_in_cached": 3.75,
- "cost_per_1m_out_cached": 0.3,
- "context_window": 200000,
- "default_max_tokens": 2048,
- "can_reason": false,
- "has_reasoning_efforts": false,
- "supports_attachments": true
- },
{
"id": "anthropic/claude-3-sonnet",
"model": "Anthropic: Claude 3 Sonnet",
@@ -1905,6 +1879,19 @@
"has_reasoning_efforts": false,
"supports_attachments": false
},
+ {
+ "id": "openai/gpt-3.5-turbo",
+ "model": "OpenAI: GPT-3.5 Turbo",
+ "cost_per_1m_in": 0.5,
+ "cost_per_1m_out": 1.5,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 16385,
+ "default_max_tokens": 2048,
+ "can_reason": false,
+ "has_reasoning_efforts": false,
+ "supports_attachments": false
+ },
{
"id": "openai/gpt-4",
"model": "OpenAI: GPT-4",