{
  "name": "Synthetic",
  "id": "synthetic",
  "api_key": "$SYNTHETIC_API_KEY",
  "api_endpoint": "https://api.synthetic.new/openai/v1",
  "type": "openai-compat",
  "default_large_model_id": "hf:zai-org/GLM-4.6",
  "default_small_model_id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
  "models": [
    {
      "id": "hf:deepseek-ai/DeepSeek-R1",
      "name": "DeepSeek R1",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-R1-0528",
      "name": "DeepSeek R1 0528",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-V3",
      "name": "DeepSeek V3",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-V3-0324",
      "name": "DeepSeek V3 0324",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-V3.1",
      "name": "DeepSeek V3.1",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
      "name": "DeepSeek V3.1 Terminus",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-V3.2",
      "name": "DeepSeek V3.2",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 162816,
      "default_max_tokens": 16281,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:zai-org/GLM-4.5",
      "name": "GLM 4.5",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:zai-org/GLM-4.6",
      "name": "GLM 4.6",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 202752,
      "default_max_tokens": 20275,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:moonshotai/Kimi-K2-Instruct",
      "name": "Kimi K2 Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:moonshotai/Kimi-K2-Instruct-0905",
      "name": "Kimi K2 Instruct 0905",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:moonshotai/Kimi-K2-Thinking",
      "name": "Kimi K2 Thinking",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:meta-llama/Llama-3.1-405B-Instruct",
      "name": "Llama 3.1 405B Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:meta-llama/Llama-3.1-70B-Instruct",
      "name": "Llama 3.1 70B Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:meta-llama/Llama-3.1-8B-Instruct",
      "name": "Llama 3.1 8B Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:meta-llama/Llama-3.3-70B-Instruct",
      "name": "Llama 3.3 70B Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
      "name": "Llama 4 Maverick 17B 128E Instruct FP8",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 536576,
      "default_max_tokens": 53657,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "hf:meta-llama/Llama-4-Scout-17B-16E-Instruct",
      "name": "Llama 4 Scout 17B 16E Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 335872,
      "default_max_tokens": 33587,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "hf:MiniMaxAI/MiniMax-M2",
      "name": "MiniMax M2",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 196608,
      "default_max_tokens": 19660,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:Qwen/Qwen3-235B-A22B-Instruct-2507",
      "name": "Qwen3 235B A22B Instruct 2507",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
      "name": "Qwen3 235B A22B Thinking 2507",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
      "name": "Qwen3 Coder 480B A35B Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:Qwen/Qwen3-VL-235B-A22B-Instruct",
      "name": "Qwen3 VL 235B A22B Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "hf:openai/gpt-oss-120b",
      "name": "gpt oss 120b",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    }
  ]
}