From 6d8f2b2c3bab2d1b90363448c915d68b6ec63e8c Mon Sep 17 00:00:00 2001
From: Charm <124303983+charmcli@users.noreply.github.com>
Date: Thu, 7 May 2026 03:14:18 +0000
Subject: [PATCH] chore: auto-update generated files

---
 internal/providers/configs/aihubmix.json     |  34 +++-
 internal/providers/configs/ionet.json        |   6 +-
 internal/providers/configs/opencode-go.json  |   8 +-
 internal/providers/configs/opencode-zen.json |   6 +-
 internal/providers/configs/openrouter.json   | 192 +++++++++----------
 internal/providers/configs/venice.json       |   4 +-
 internal/providers/configs/vercel.json       |  24 +--
 7 files changed, 140 insertions(+), 134 deletions(-)

diff --git a/internal/providers/configs/aihubmix.json b/internal/providers/configs/aihubmix.json
index d47e8fbe4bfb22f322c432fc25a15abb72712c8c..122dd859b42252374cf2a4448eca6ba044b5024c 100644
--- a/internal/providers/configs/aihubmix.json
+++ b/internal/providers/configs/aihubmix.json
@@ -2143,6 +2143,24 @@
       "default_reasoning_effort": "medium",
       "supports_attachments": true
     },
+    {
+      "id": "gpt-5.5-free",
+      "name": "GPT 5.5 (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1050000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true
+    },
     {
       "id": "gpt-oss-120b",
       "name": "gpt-oss-120b",
@@ -2926,10 +2944,10 @@
     {
       "id": "qwen3-max",
       "name": "Qwen3 Max",
-      "cost_per_1m_in": 0.34246,
-      "cost_per_1m_out": 1.36984,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.34246,
+      "cost_per_1m_in": 0.4508,
+      "cost_per_1m_out": 1.8032,
+      "cost_per_1m_in_cached": 0.5635,
+      "cost_per_1m_out_cached": 0.09016,
       "context_window": 262144,
       "default_max_tokens": 65536,
       "can_reason": false,
@@ -2938,10 +2956,10 @@
     {
       "id": "qwen3-max-2026-01-23",
       "name": "Qwen3 Max 2026 01-23",
-      "cost_per_1m_in": 0.34246,
-      "cost_per_1m_out": 1.36984,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.34246,
+      "cost_per_1m_in": 0.4508,
+      "cost_per_1m_out": 1.8032,
+      "cost_per_1m_in_cached": 0.5635,
+      "cost_per_1m_out_cached": 0.09016,
       "context_window": 252000,
       "default_max_tokens": 32000,
       "can_reason": true,
diff --git a/internal/providers/configs/ionet.json b/internal/providers/configs/ionet.json
index 9904eb6cb13df7cb612ad79a93fc95f075f6d1f8..f8baadb2d07773ea90a8ebbe0f0d50163dbaa073 100644
--- a/internal/providers/configs/ionet.json
+++ b/internal/providers/configs/ionet.json
@@ -166,9 +166,9 @@
     {
       "id": "moonshotai/Kimi-K2.6",
       "name": "MoonshotAI: Kimi K2.6",
-      "cost_per_1m_in": 0.74,
-      "cost_per_1m_out": 3.49,
-      "cost_per_1m_in_cached": 0.14,
+      "cost_per_1m_in": 0.8,
+      "cost_per_1m_out": 4,
+      "cost_per_1m_in_cached": 0.25,
       "cost_per_1m_out_cached": 0,
       "context_window": 262142,
       "default_max_tokens": 26214,
diff --git a/internal/providers/configs/opencode-go.json b/internal/providers/configs/opencode-go.json
index 6372282d3cc7014f643e502757a260adf3288761..b1fca618f5eedff690f74ec125e07163cbe6d960 100644
--- a/internal/providers/configs/opencode-go.json
+++ b/internal/providers/configs/opencode-go.json
@@ -99,10 +99,10 @@
     },
     {
       "id": "kimi-k2.6",
-      "name": "Kimi K2.6 (3x limits)",
-      "cost_per_1m_in": 0.32,
-      "cost_per_1m_out": 1.34,
-      "cost_per_1m_in_cached": 0.05,
+      "name": "Kimi K2.6",
+      "cost_per_1m_in": 0.95,
+      "cost_per_1m_out": 4,
+      "cost_per_1m_in_cached": 0.16,
       "cost_per_1m_out_cached": 0,
       "context_window": 262144,
       "default_max_tokens": 65536,
diff --git a/internal/providers/configs/opencode-zen.json b/internal/providers/configs/opencode-zen.json
index 03dbfda415fdd965582ed599108ce9eff12a00a1..734a677ebceb19d78c68766d27c4759866e65026 100644
--- a/internal/providers/configs/opencode-zen.json
+++ b/internal/providers/configs/opencode-zen.json
@@ -244,9 +244,9 @@
     {
       "id": "gpt-5-nano",
       "name": "GPT-5 Nano",
-      "cost_per_1m_in": 0,
-      "cost_per_1m_out": 0,
-      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_in": 0.05,
+      "cost_per_1m_out": 0.4,
+      "cost_per_1m_in_cached": 0.01,
       "cost_per_1m_out_cached": 0,
       "context_window": 400000,
       "default_max_tokens": 128000,
diff --git a/internal/providers/configs/openrouter.json b/internal/providers/configs/openrouter.json
index c4af5dec71777614aabc6dc0f6f6e70b4e1d4186..ce9f522ecceba2740fb610c099fff10e0d738ec3 100644
--- a/internal/providers/configs/openrouter.json
+++ b/internal/providers/configs/openrouter.json
@@ -19,18 +19,6 @@
       "can_reason": false,
       "supports_attachments": false
     },
-    {
-      "id": "allenai/olmo-3.1-32b-instruct",
-      "name": "AllenAI: Olmo 3.1 32B Instruct",
-      "cost_per_1m_in": 0.2,
-      "cost_per_1m_out": 0.6,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 65536,
-      "default_max_tokens": 8192,
-      "can_reason": false,
-      "supports_attachments": false
-    },
     {
       "id": "amazon/nova-2-lite-v1",
       "name": "Amazon: Nova 2 Lite",
@@ -568,10 +556,10 @@
     {
       "id": "deepseek/deepseek-chat-v3.1",
       "name": "DeepSeek: DeepSeek V3.1",
-      "cost_per_1m_in": 0.21,
-      "cost_per_1m_out": 0.79,
+      "cost_per_1m_in": 0.6,
+      "cost_per_1m_out": 1.7,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.13,
+      "cost_per_1m_out_cached": 0,
       "context_window": 163840,
       "default_max_tokens": 16384,
       "can_reason": true,
@@ -587,11 +575,11 @@
       "id": "deepseek/deepseek-v3.1-terminus",
       "name": "DeepSeek: DeepSeek V3.1 Terminus",
       "cost_per_1m_in": 0.27,
-      "cost_per_1m_out": 1,
+      "cost_per_1m_out": 0.95,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_out_cached": 0.13,
       "context_window": 163840,
-      "default_max_tokens": 81920,
+      "default_max_tokens": 16384,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -604,12 +592,12 @@
     {
       "id": "deepseek/deepseek-v3.2",
       "name": "DeepSeek: DeepSeek V3.2",
-      "cost_per_1m_in": 0.26,
-      "cost_per_1m_out": 0.38,
+      "cost_per_1m_in": 0.5,
+      "cost_per_1m_out": 1.5,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.13,
+      "cost_per_1m_out_cached": 0.25,
       "context_window": 163840,
-      "default_max_tokens": 8192,
+      "default_max_tokens": 81920,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -625,9 +613,9 @@
       "cost_per_1m_in": 0.27,
       "cost_per_1m_out": 0.41,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.27,
+      "cost_per_1m_out_cached": 0,
       "context_window": 163840,
-      "default_max_tokens": 81920,
+      "default_max_tokens": 32768,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -694,12 +682,12 @@
     {
       "id": "deepseek/deepseek-r1-0528",
       "name": "DeepSeek: R1 0528",
-      "cost_per_1m_in": 0.5,
-      "cost_per_1m_out": 2.18,
+      "cost_per_1m_in": 0.55,
+      "cost_per_1m_out": 2.15,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 163840,
-      "default_max_tokens": 81920,
+      "cost_per_1m_out_cached": 0.35,
+      "context_window": 131072,
+      "default_max_tokens": 65536,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -952,12 +940,12 @@
     {
       "id": "google/gemma-4-26b-a4b-it",
       "name": "Google: Gemma 4 26B A4B ",
-      "cost_per_1m_in": 0.15,
-      "cost_per_1m_out": 0.5,
+      "cost_per_1m_in": 0.07,
+      "cost_per_1m_out": 0.34,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.15,
+      "cost_per_1m_out_cached": 0,
       "context_window": 262144,
-      "default_max_tokens": 131072,
+      "default_max_tokens": 8192,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -988,12 +976,12 @@
     {
       "id": "google/gemma-4-31b-it",
       "name": "Google: Gemma 4 31B",
-      "cost_per_1m_in": 0.13,
-      "cost_per_1m_out": 0.38,
+      "cost_per_1m_in": 0.14,
+      "cost_per_1m_out": 0.4,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 262144,
-      "default_max_tokens": 8192,
+      "default_max_tokens": 65536,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -1078,10 +1066,10 @@
     {
       "id": "meta-llama/llama-3.3-70b-instruct",
       "name": "Meta: Llama 3.3 70B Instruct",
-      "cost_per_1m_in": 0.6,
-      "cost_per_1m_out": 0.6,
+      "cost_per_1m_in": 0.12,
+      "cost_per_1m_out": 0.38,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.6,
+      "cost_per_1m_out_cached": 0,
       "context_window": 131072,
       "default_max_tokens": 65536,
       "can_reason": false,
@@ -1132,12 +1120,12 @@
     {
       "id": "minimax/minimax-m2.1",
       "name": "MiniMax: MiniMax M2.1",
-      "cost_per_1m_in": 0.3,
-      "cost_per_1m_out": 1.2,
+      "cost_per_1m_in": 0.29,
+      "cost_per_1m_out": 0.95,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0.03,
-      "context_window": 204800,
-      "default_max_tokens": 65536,
+      "context_window": 196608,
+      "default_max_tokens": 98304,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -1300,24 +1288,24 @@
     {
       "id": "mistralai/ministral-3b-2512",
       "name": "Mistral: Ministral 3 3B 2512",
-      "cost_per_1m_in": 0.15,
-      "cost_per_1m_out": 0.15,
+      "cost_per_1m_in": 0.1,
+      "cost_per_1m_out": 0.1,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_out_cached": 0.01,
       "context_window": 131072,
-      "default_max_tokens": 65536,
+      "default_max_tokens": 13107,
       "can_reason": false,
       "supports_attachments": true
     },
     {
       "id": "mistralai/ministral-8b-2512",
       "name": "Mistral: Ministral 3 8B 2512",
-      "cost_per_1m_in": 0.3,
-      "cost_per_1m_out": 0.3,
+      "cost_per_1m_in": 0.15,
+      "cost_per_1m_out": 0.15,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_out_cached": 0.015,
       "context_window": 262144,
-      "default_max_tokens": 131072,
+      "default_max_tokens": 26214,
       "can_reason": false,
       "supports_attachments": true
     },
@@ -1492,12 +1480,12 @@
     {
       "id": "moonshotai/kimi-k2-0905",
       "name": "MoonshotAI: Kimi K2 0905",
-      "cost_per_1m_in": 0.6,
-      "cost_per_1m_out": 2.5,
+      "cost_per_1m_in": 1,
+      "cost_per_1m_out": 3,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_out_cached": 0.5,
       "context_window": 262144,
-      "default_max_tokens": 131072,
+      "default_max_tokens": 8192,
       "can_reason": false,
       "supports_attachments": false
     },
@@ -1522,12 +1510,12 @@
     {
       "id": "moonshotai/kimi-k2.5",
       "name": "MoonshotAI: Kimi K2.5",
-      "cost_per_1m_in": 0.45,
-      "cost_per_1m_out": 2.25,
+      "cost_per_1m_in": 0.44,
+      "cost_per_1m_out": 2.2,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.07,
+      "cost_per_1m_out_cached": 0.22,
       "context_window": 262144,
-      "default_max_tokens": 32000,
+      "default_max_tokens": 131072,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -1540,12 +1528,12 @@
     {
       "id": "moonshotai/kimi-k2.6",
       "name": "MoonshotAI: Kimi K2.6",
-      "cost_per_1m_in": 0.95,
-      "cost_per_1m_out": 4,
+      "cost_per_1m_in": 0.75,
+      "cost_per_1m_out": 3.5,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.16,
+      "cost_per_1m_out_cached": 0.15,
       "context_window": 262144,
-      "default_max_tokens": 26214,
+      "default_max_tokens": 8192,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -1855,7 +1843,7 @@
       "cost_per_1m_in": 2.5,
       "cost_per_1m_out": 10,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_out_cached": 1.25,
       "context_window": 128000,
       "default_max_tokens": 8192,
       "can_reason": false,
@@ -2133,7 +2121,7 @@
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0.175,
       "context_window": 128000,
-      "default_max_tokens": 16000,
+      "default_max_tokens": 8192,
       "can_reason": false,
       "supports_attachments": true
     },
@@ -2314,12 +2302,12 @@
     {
       "id": "openai/gpt-oss-120b",
       "name": "OpenAI: gpt-oss-120b",
-      "cost_per_1m_in": 0.05,
-      "cost_per_1m_out": 0.25,
+      "cost_per_1m_in": 0.14,
+      "cost_per_1m_out": 0.95,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 131072,
-      "default_max_tokens": 16384,
+      "default_max_tokens": 65536,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -2818,12 +2806,12 @@
     {
       "id": "qwen/qwen3-30b-a3b-instruct-2507",
       "name": "Qwen: Qwen3 30B A3B Instruct 2507",
-      "cost_per_1m_in": 0.1,
+      "cost_per_1m_in": 0.09,
       "cost_per_1m_out": 0.3,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 262144,
-      "default_max_tokens": 26214,
+      "default_max_tokens": 131072,
       "can_reason": false,
       "supports_attachments": false
     },
@@ -2885,11 +2873,11 @@
       "id": "qwen/qwen3-coder-30b-a3b-instruct",
       "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
       "cost_per_1m_in": 0.07,
-      "cost_per_1m_out": 0.27,
+      "cost_per_1m_out": 0.28,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 160000,
-      "default_max_tokens": 16384,
+      "context_window": 262144,
+      "default_max_tokens": 131072,
       "can_reason": false,
       "supports_attachments": false
     },
@@ -2932,10 +2920,10 @@
     {
       "id": "qwen/qwen3-coder-next",
       "name": "Qwen: Qwen3 Coder Next",
-      "cost_per_1m_in": 0.14,
+      "cost_per_1m_in": 0.11,
       "cost_per_1m_out": 0.8,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.09,
+      "cost_per_1m_out_cached": 0.07,
       "context_window": 262144,
       "default_max_tokens": 131072,
       "can_reason": false,
@@ -3028,11 +3016,11 @@
     {
       "id": "qwen/qwen3-vl-235b-a22b-instruct",
       "name": "Qwen: Qwen3 VL 235B A22B Instruct",
-      "cost_per_1m_in": 0.2,
-      "cost_per_1m_out": 0.88,
+      "cost_per_1m_in": 0.25,
+      "cost_per_1m_out": 1.5,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.11,
-      "context_window": 262144,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
       "default_max_tokens": 8192,
       "can_reason": false,
       "supports_attachments": true
@@ -3130,8 +3118,8 @@
     {
       "id": "qwen/qwen3.5-397b-a17b",
       "name": "Qwen: Qwen3.5 397B A17B",
-      "cost_per_1m_in": 0.6,
-      "cost_per_1m_out": 3.6,
+      "cost_per_1m_in": 0.39,
+      "cost_per_1m_out": 2.34,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 262144,
@@ -3202,10 +3190,10 @@
     {
       "id": "qwen/qwen3.5-27b",
       "name": "Qwen: Qwen3.5-27B",
-      "cost_per_1m_in": 0.3,
-      "cost_per_1m_out": 2.4,
+      "cost_per_1m_in": 0.27,
+      "cost_per_1m_out": 2.16,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_out_cached": 0.27,
       "context_window": 262144,
       "default_max_tokens": 32768,
       "can_reason": true,
@@ -3242,8 +3230,8 @@
       "cost_per_1m_out": 0.15,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 256000,
-      "default_max_tokens": 16384,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -3274,10 +3262,10 @@
     {
       "id": "qwen/qwen3.6-27b",
       "name": "Qwen: Qwen3.6 27B",
-      "cost_per_1m_in": 0.5,
-      "cost_per_1m_out": 2,
+      "cost_per_1m_in": 0.6,
+      "cost_per_1m_out": 3.6,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.25,
+      "cost_per_1m_out_cached": 0,
       "context_window": 262144,
       "default_max_tokens": 32768,
       "can_reason": true,
@@ -3292,10 +3280,10 @@
     {
       "id": "qwen/qwen3.6-35b-a3b",
       "name": "Qwen: Qwen3.6 35B A3B",
-      "cost_per_1m_in": 0.15,
-      "cost_per_1m_out": 1,
+      "cost_per_1m_in": 0.23,
+      "cost_per_1m_out": 1.8,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0.05,
+      "cost_per_1m_out_cached": 0,
       "context_window": 262144,
       "default_max_tokens": 131072,
       "can_reason": true,
@@ -3379,9 +3367,9 @@
       "cost_per_1m_in": 0.1,
       "cost_per_1m_out": 0.3,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_out_cached": 0.03,
       "context_window": 262144,
-      "default_max_tokens": 32768,
+      "default_max_tokens": 131072,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -3597,7 +3585,7 @@
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0.11,
       "context_window": 131072,
-      "default_max_tokens": 48000,
+      "default_max_tokens": 49152,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -3700,12 +3688,12 @@
     {
       "id": "z-ai/glm-4.7",
       "name": "Z.ai: GLM 4.7",
-      "cost_per_1m_in": 0.38,
-      "cost_per_1m_out": 1.74,
+      "cost_per_1m_in": 0.45,
+      "cost_per_1m_out": 2.1,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_out_cached": 0.11,
       "context_window": 202752,
-      "default_max_tokens": 20275,
+      "default_max_tokens": 101376,
       "can_reason": true,
       "reasoning_levels": [
         "low",
@@ -3736,11 +3724,11 @@
     {
       "id": "z-ai/glm-5",
       "name": "Z.ai: GLM 5",
-      "cost_per_1m_in": 0.95,
-      "cost_per_1m_out": 2.55,
+      "cost_per_1m_in": 1,
+      "cost_per_1m_out": 3.2,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0.2,
-      "context_window": 204800,
+      "context_window": 202800,
       "default_max_tokens": 65536,
       "can_reason": true,
       "reasoning_levels": [
diff --git a/internal/providers/configs/venice.json b/internal/providers/configs/venice.json
index 47bc6b329454cab8613c6eabdba70df7d671bb35..863b3915eb59790c047174c64600529516b25c8e 100644
--- a/internal/providers/configs/venice.json
+++ b/internal/providers/configs/venice.json
@@ -544,8 +544,8 @@
     {
       "id": "grok-4-20",
       "name": "Grok 4.20",
-      "cost_per_1m_in": 2.27,
-      "cost_per_1m_out": 6.8,
+      "cost_per_1m_in": 1.42,
+      "cost_per_1m_out": 2.83,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 2000000,
diff --git a/internal/providers/configs/vercel.json b/internal/providers/configs/vercel.json
index 1ee2f9047c56395c4b682491f8e8bc53947d4614..43e7ecf9c2f2e0a884f29b76b7467229d035d7d5 100644
--- a/internal/providers/configs/vercel.json
+++ b/internal/providers/configs/vercel.json
@@ -1468,8 +1468,8 @@
     {
       "id": "xai/grok-4.20-non-reasoning-beta",
       "name": "Grok 4.20 Beta Non-Reasoning",
-      "cost_per_1m_in": 2,
-      "cost_per_1m_out": 6,
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 2.5,
       "cost_per_1m_in_cached": 0.2,
       "cost_per_1m_out_cached": 0,
       "context_window": 2000000,
@@ -1480,8 +1480,8 @@
     {
       "id": "xai/grok-4.20-reasoning-beta",
       "name": "Grok 4.20 Beta Reasoning",
-      "cost_per_1m_in": 2,
-      "cost_per_1m_out": 6,
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 2.5,
       "cost_per_1m_in_cached": 0.2,
       "cost_per_1m_out_cached": 0,
       "context_window": 2000000,
@@ -1498,8 +1498,8 @@
     {
       "id": "xai/grok-4.20-multi-agent-beta",
       "name": "Grok 4.20 Multi Agent Beta",
-      "cost_per_1m_in": 2,
-      "cost_per_1m_out": 6,
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 2.5,
       "cost_per_1m_in_cached": 0.2,
       "cost_per_1m_out_cached": 0,
       "context_window": 2000000,
@@ -1516,8 +1516,8 @@
     {
       "id": "xai/grok-4.20-multi-agent",
       "name": "Grok 4.20 Multi-Agent",
-      "cost_per_1m_in": 2,
-      "cost_per_1m_out": 6,
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 2.5,
       "cost_per_1m_in_cached": 0.2,
       "cost_per_1m_out_cached": 0,
       "context_window": 2000000,
@@ -1534,8 +1534,8 @@
     {
       "id": "xai/grok-4.20-non-reasoning",
       "name": "Grok 4.20 Non-Reasoning",
-      "cost_per_1m_in": 2,
-      "cost_per_1m_out": 6,
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 2.5,
       "cost_per_1m_in_cached": 0.2,
       "cost_per_1m_out_cached": 0,
       "context_window": 2000000,
@@ -1546,8 +1546,8 @@
     {
       "id": "xai/grok-4.20-reasoning",
       "name": "Grok 4.20 Reasoning",
-      "cost_per_1m_in": 2,
-      "cost_per_1m_out": 6,
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 2.5,
       "cost_per_1m_in_cached": 0.2,
       "cost_per_1m_out_cached": 0,
       "context_window": 2000000,