@@ -66,21 +66,6 @@
"api_key": "$SYNTHETIC_API_KEY",
"base_url": "https://api.synthetic.new/openai/v1",
"models": [
- {
- "id": "hf:deepseek-ai/DeepSeek-R1-0528",
- "name": "DeepSeek R1 0528",
- "cost_per_1m_in": 3.0,
- "cost_per_1m_out": 8.0,
- "cost_per_1m_in_cached": 3.0,
- "cost_per_1m_out_cached": 8.0,
- "context_window": 131072,
- "default_max_tokens": 65536,
- "can_reason": true,
- "has_reasoning_efforts": true,
- "reasoning_levels": ["low", "medium", "high"],
- "default_reasoning_effort": "high",
- "supports_attachments": false
- },
{
"id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
"name": "DeepSeek V3.1 Terminus",
@@ -111,18 +96,6 @@
"default_reasoning_effort": "high",
"supports_attachments": false
},
- {
- "id": "hf:moonshotai/Kimi-K2-Instruct-0905",
- "name": "Kimi K2 Instruct 0905",
- "cost_per_1m_in": 1.2,
- "cost_per_1m_out": 1.2,
- "cost_per_1m_in_cached": 1.2,
- "cost_per_1m_out_cached": 1.2,
- "context_window": 262144,
- "default_max_tokens": 262144,
- "can_reason": false,
- "supports_attachments": false
- },
{
"id": "hf:openai/gpt-oss-120b",
"name": "GPT-OSS 120B",
@@ -138,21 +111,6 @@
"default_reasoning_effort": "high",
"supports_attachments": false
},
- {
- "id": "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
- "name": "Qwen3 235B A22B Thinking 2507",
- "cost_per_1m_in": 0.65,
- "cost_per_1m_out": 3.0,
- "cost_per_1m_in_cached": 0.65,
- "cost_per_1m_out_cached": 3.0,
- "context_window": 262144,
- "default_max_tokens": 81920,
- "can_reason": true,
- "has_reasoning_efforts": true,
- "reasoning_levels": ["low", "medium", "high"],
- "default_reasoning_effort": "high",
- "supports_attachments": false
- },
{
"id": "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
"name": "Qwen3 Coder 480B A35B Instruct",
@@ -179,6 +137,21 @@
"reasoning_levels": ["low", "medium", "high"],
"default_reasoning_effort": "high",
"supports_attachments": false
+ },
+ {
+ "id": "hf:moonshotai/Kimi-K2-Thinking",
+ "name": "Kimi K2 THinking",
+ "cost_per_1m_in": 0.55,
+ "cost_per_1m_out": 2.19,
+ "cost_per_1m_in_cached": 0.55,
+ "cost_per_1m_out_cached": 2.19,
+ "context_window": 204800,
+ "default_max_tokens": 65536,
+ "can_reason": true,
+ "has_reasoning_efforts": true,
+ "reasoning_levels": ["low", "medium", "high"],
+ "default_reasoning_effort": "high",
+ "supports_attachments": false
}
]
},