synthetic.json

  1{
  2  "name": "Synthetic",
  3  "id": "synthetic",
  4  "api_key": "$SYNTHETIC_API_KEY",
  5  "api_endpoint": "https://api.synthetic.new/openai/v1",
  6  "type": "openai-compat",
  7  "default_large_model_id": "hf:zai-org/GLM-4.7",
  8  "default_small_model_id": "hf:deepseek-ai/DeepSeek-V3.2",
  9  "models": [
 10    {
 11      "id": "hf:deepseek-ai/DeepSeek-R1-0528",
 12      "name": "DeepSeek R1 0528",
 13      "cost_per_1m_in": 3,
 14      "cost_per_1m_out": 8,
 15      "cost_per_1m_in_cached": 3,
 16      "cost_per_1m_out_cached": 3,
 17      "context_window": 131072,
 18      "default_max_tokens": 13107,
 19      "can_reason": true,
 20      "reasoning_levels": [
 21        "low",
 22        "medium",
 23        "high"
 24      ],
 25      "default_reasoning_effort": "medium",
 26      "supports_attachments": false,
 27      "options": {}
 28    },
 29    {
 30      "id": "hf:deepseek-ai/DeepSeek-V3",
 31      "name": "DeepSeek V3",
 32      "cost_per_1m_in": 1.25,
 33      "cost_per_1m_out": 1.25,
 34      "cost_per_1m_in_cached": 1.25,
 35      "cost_per_1m_out_cached": 1.25,
 36      "context_window": 131072,
 37      "default_max_tokens": 13107,
 38      "can_reason": false,
 39      "supports_attachments": false,
 40      "options": {}
 41    },
 42    {
 43      "id": "hf:deepseek-ai/DeepSeek-V3-0324",
 44      "name": "DeepSeek V3 0324",
 45      "cost_per_1m_in": 1.2,
 46      "cost_per_1m_out": 1.2,
 47      "cost_per_1m_in_cached": 1.2,
 48      "cost_per_1m_out_cached": 1.2,
 49      "context_window": 131072,
 50      "default_max_tokens": 13107,
 51      "can_reason": false,
 52      "supports_attachments": false,
 53      "options": {}
 54    },
 55    {
 56      "id": "hf:deepseek-ai/DeepSeek-V3.2",
 57      "name": "DeepSeek V3.2",
 58      "cost_per_1m_in": 0.56,
 59      "cost_per_1m_out": 1.68,
 60      "cost_per_1m_in_cached": 0.56,
 61      "cost_per_1m_out_cached": 0.56,
 62      "context_window": 162816,
 63      "default_max_tokens": 16281,
 64      "can_reason": true,
 65      "reasoning_levels": [
 66        "low",
 67        "medium",
 68        "high"
 69      ],
 70      "default_reasoning_effort": "medium",
 71      "supports_attachments": false,
 72      "options": {}
 73    },
 74    {
 75      "id": "hf:zai-org/GLM-4.7",
 76      "name": "GLM 4.7",
 77      "cost_per_1m_in": 0.55,
  79      "cost_per_1m_out": 2.19,
 79      "cost_per_1m_in_cached": 0.55,
 80      "cost_per_1m_out_cached": 0.55,
 81      "context_window": 202752,
 82      "default_max_tokens": 20275,
 83      "can_reason": true,
 84      "reasoning_levels": [
 85        "low",
 86        "medium",
 87        "high"
 88      ],
 89      "default_reasoning_effort": "medium",
 90      "supports_attachments": false,
 91      "options": {}
 92    },
 93    {
 94      "id": "hf:moonshotai/Kimi-K2-Instruct-0905",
 95      "name": "Kimi K2 Instruct 0905",
 96      "cost_per_1m_in": 1.2,
 97      "cost_per_1m_out": 1.2,
 98      "cost_per_1m_in_cached": 1.2,
 99      "cost_per_1m_out_cached": 1.2,
100      "context_window": 262144,
101      "default_max_tokens": 26214,
102      "can_reason": false,
103      "supports_attachments": false,
104      "options": {}
105    },
106    {
107      "id": "hf:moonshotai/Kimi-K2-Thinking",
108      "name": "Kimi K2 Thinking",
109      "cost_per_1m_in": 0.6,
110      "cost_per_1m_out": 2.5,
111      "cost_per_1m_in_cached": 0.6,
112      "cost_per_1m_out_cached": 0.6,
113      "context_window": 262144,
114      "default_max_tokens": 26214,
115      "can_reason": true,
116      "reasoning_levels": [
117        "low",
118        "medium",
119        "high"
120      ],
121      "default_reasoning_effort": "medium",
122      "supports_attachments": false,
123      "options": {}
124    },
125    {
126      "id": "hf:moonshotai/Kimi-K2.5",
127      "name": "Kimi K2.5",
128      "cost_per_1m_in": 0.6,
129      "cost_per_1m_out": 3,
130      "cost_per_1m_in_cached": 0.6,
131      "cost_per_1m_out_cached": 0.6,
132      "context_window": 262144,
133      "default_max_tokens": 32768,
134      "can_reason": true,
135      "reasoning_levels": [
136        "low",
137        "medium",
138        "high"
139      ],
140      "default_reasoning_effort": "medium",
141      "supports_attachments": true,
142      "options": {}
143    },
144    {
145      "id": "hf:nvidia/Kimi-K2.5-NVFP4",
146      "name": "Kimi K2.5 NVFP4",
147      "cost_per_1m_in": 0.6,
148      "cost_per_1m_out": 3,
149      "cost_per_1m_in_cached": 0.6,
150      "cost_per_1m_out_cached": 0.6,
151      "context_window": 262144,
152      "default_max_tokens": 32768,
153      "can_reason": true,
154      "reasoning_levels": [
155        "low",
156        "medium",
157        "high"
158      ],
159      "default_reasoning_effort": "medium",
160      "supports_attachments": true,
161      "options": {}
162    },
163    {
164      "id": "hf:meta-llama/Llama-3.3-70B-Instruct",
165      "name": "Llama 3.3 70B Instruct",
 166      "cost_per_1m_in": 0.9,
 167      "cost_per_1m_out": 0.9,
 168      "cost_per_1m_in_cached": 0.9,
 169      "cost_per_1m_out_cached": 0.9,
170      "context_window": 131072,
171      "default_max_tokens": 13107,
172      "can_reason": false,
173      "supports_attachments": false,
174      "options": {}
175    },
176    {
177      "id": "hf:MiniMaxAI/MiniMax-M2.1",
178      "name": "MiniMax M2.1",
179      "cost_per_1m_in": 0.3,
180      "cost_per_1m_out": 1.2,
181      "cost_per_1m_in_cached": 0.3,
182      "cost_per_1m_out_cached": 0.3,
183      "context_window": 196608,
184      "default_max_tokens": 19660,
185      "can_reason": true,
186      "reasoning_levels": [
187        "low",
188        "medium",
189        "high"
190      ],
191      "default_reasoning_effort": "medium",
192      "supports_attachments": false,
193      "options": {}
194    },
195    {
196      "id": "hf:MiniMaxAI/MiniMax-M2.5",
197      "name": "MiniMax M2.5",
198      "cost_per_1m_in": 0.6,
199      "cost_per_1m_out": 3,
200      "cost_per_1m_in_cached": 0.6,
201      "cost_per_1m_out_cached": 0.6,
202      "context_window": 191488,
203      "default_max_tokens": 19148,
204      "can_reason": true,
205      "reasoning_levels": [
206        "low",
207        "medium",
208        "high"
209      ],
210      "default_reasoning_effort": "medium",
211      "supports_attachments": false,
212      "options": {}
213    },
214    {
215      "id": "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
216      "name": "Qwen3 235B A22B Thinking 2507",
217      "cost_per_1m_in": 0.65,
218      "cost_per_1m_out": 3,
219      "cost_per_1m_in_cached": 0.65,
220      "cost_per_1m_out_cached": 0.65,
221      "context_window": 262144,
222      "default_max_tokens": 26214,
223      "can_reason": true,
224      "reasoning_levels": [
225        "low",
226        "medium",
227        "high"
228      ],
229      "default_reasoning_effort": "medium",
230      "supports_attachments": false,
231      "options": {}
232    },
233    {
234      "id": "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
235      "name": "Qwen3 Coder 480B A35B Instruct",
236      "cost_per_1m_in": 2,
237      "cost_per_1m_out": 2,
238      "cost_per_1m_in_cached": 2,
239      "cost_per_1m_out_cached": 2,
240      "context_window": 262144,
241      "default_max_tokens": 26214,
242      "can_reason": false,
243      "supports_attachments": false,
244      "options": {}
245    },
246    {
247      "id": "hf:Qwen/Qwen3.5-397B-A17B",
248      "name": "Qwen3.5 397B A17B",
249      "cost_per_1m_in": 0.6,
250      "cost_per_1m_out": 3,
251      "cost_per_1m_in_cached": 0.6,
252      "cost_per_1m_out_cached": 0.6,
253      "context_window": 262144,
254      "default_max_tokens": 32768,
255      "can_reason": false,
256      "supports_attachments": true,
257      "options": {}
258    },
259    {
260      "id": "hf:openai/gpt-oss-120b",
261      "name": "gpt oss 120b",
 262      "cost_per_1m_in": 0.1,
 263      "cost_per_1m_out": 0.1,
 264      "cost_per_1m_in_cached": 0.1,
 265      "cost_per_1m_out_cached": 0.1,
266      "context_window": 131072,
267      "default_max_tokens": 13107,
268      "can_reason": true,
269      "reasoning_levels": [
270        "low",
271        "medium",
272        "high"
273      ],
274      "default_reasoning_effort": "medium",
275      "supports_attachments": false,
276      "options": {}
277    }
278  ]
279}