  1{
  2  "name": "Synthetic",
  3  "id": "synthetic",
  4  "api_key": "$SYNTHETIC_API_KEY",
  5  "api_endpoint": "https://api.synthetic.new/openai/v1",
  6  "type": "openai-compat",
  7  "default_large_model_id": "hf:zai-org/GLM-4.6",
  8  "default_small_model_id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
  9  "models": [
 10    {
 11      "id": "hf:deepseek-ai/DeepSeek-R1",
 12      "name": "DeepSeek R1",
 13      "cost_per_1m_in": 0,
 14      "cost_per_1m_out": 0,
 15      "cost_per_1m_in_cached": 0,
 16      "cost_per_1m_out_cached": 0,
 17      "context_window": 131072,
 18      "default_max_tokens": 13107,
 19      "can_reason": true,
 20      "reasoning_levels": [
 21        "low",
 22        "medium",
 23        "high"
 24      ],
 25      "default_reasoning_effort": "medium",
 26      "supports_attachments": false,
 27      "options": {}
 28    },
 29    {
 30      "id": "hf:deepseek-ai/DeepSeek-R1-0528",
 31      "name": "DeepSeek R1 0528",
 32      "cost_per_1m_in": 0,
 33      "cost_per_1m_out": 0,
 34      "cost_per_1m_in_cached": 0,
 35      "cost_per_1m_out_cached": 0,
 36      "context_window": 131072,
 37      "default_max_tokens": 13107,
 38      "can_reason": true,
 39      "reasoning_levels": [
 40        "low",
 41        "medium",
 42        "high"
 43      ],
 44      "default_reasoning_effort": "medium",
 45      "supports_attachments": false,
 46      "options": {}
 47    },
 48    {
 49      "id": "hf:deepseek-ai/DeepSeek-V3",
 50      "name": "DeepSeek V3",
 51      "cost_per_1m_in": 0,
 52      "cost_per_1m_out": 0,
 53      "cost_per_1m_in_cached": 0,
 54      "cost_per_1m_out_cached": 0,
 55      "context_window": 131072,
 56      "default_max_tokens": 13107,
 57      "can_reason": false,
 58      "supports_attachments": false,
 59      "options": {}
 60    },
 61    {
 62      "id": "hf:deepseek-ai/DeepSeek-V3-0324",
 63      "name": "DeepSeek V3 0324",
 64      "cost_per_1m_in": 0,
 65      "cost_per_1m_out": 0,
 66      "cost_per_1m_in_cached": 0,
 67      "cost_per_1m_out_cached": 0,
 68      "context_window": 131072,
 69      "default_max_tokens": 13107,
 70      "can_reason": false,
 71      "supports_attachments": false,
 72      "options": {}
 73    },
 74    {
 75      "id": "hf:deepseek-ai/DeepSeek-V3.1",
 76      "name": "DeepSeek V3.1",
 77      "cost_per_1m_in": 0,
 78      "cost_per_1m_out": 0,
 79      "cost_per_1m_in_cached": 0,
 80      "cost_per_1m_out_cached": 0,
 81      "context_window": 131072,
 82      "default_max_tokens": 13107,
 83      "can_reason": true,
 84      "reasoning_levels": [
 85        "low",
 86        "medium",
 87        "high"
 88      ],
 89      "default_reasoning_effort": "medium",
 90      "supports_attachments": false,
 91      "options": {}
 92    },
 93    {
 94      "id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
 95      "name": "DeepSeek V3.1 Terminus",
 96      "cost_per_1m_in": 0,
 97      "cost_per_1m_out": 0,
 98      "cost_per_1m_in_cached": 0,
 99      "cost_per_1m_out_cached": 0,
100      "context_window": 131072,
101      "default_max_tokens": 13107,
102      "can_reason": true,
103      "reasoning_levels": [
104        "low",
105        "medium",
106        "high"
107      ],
108      "default_reasoning_effort": "medium",
109      "supports_attachments": false,
110      "options": {}
111    },
112    {
113      "id": "hf:zai-org/GLM-4.5",
114      "name": "GLM 4.5",
115      "cost_per_1m_in": 0,
116      "cost_per_1m_out": 0,
117      "cost_per_1m_in_cached": 0,
118      "cost_per_1m_out_cached": 0,
119      "context_window": 131072,
120      "default_max_tokens": 13107,
121      "can_reason": false,
122      "supports_attachments": false,
123      "options": {}
124    },
125    {
126      "id": "hf:zai-org/GLM-4.6",
127      "name": "GLM 4.6",
128      "cost_per_1m_in": 0,
129      "cost_per_1m_out": 0,
130      "cost_per_1m_in_cached": 0,
131      "cost_per_1m_out_cached": 0,
132      "context_window": 202752,
133      "default_max_tokens": 20275,
134      "can_reason": true,
135      "reasoning_levels": [
136        "low",
137        "medium",
138        "high"
139      ],
140      "default_reasoning_effort": "medium",
141      "supports_attachments": false,
142      "options": {}
143    },
144    {
145      "id": "hf:moonshotai/Kimi-K2-Instruct",
146      "name": "Kimi K2 Instruct",
147      "cost_per_1m_in": 0,
148      "cost_per_1m_out": 0,
149      "cost_per_1m_in_cached": 0,
150      "cost_per_1m_out_cached": 0,
151      "context_window": 131072,
152      "default_max_tokens": 13107,
153      "can_reason": false,
154      "supports_attachments": false,
155      "options": {}
156    },
157    {
158      "id": "hf:moonshotai/Kimi-K2-Instruct-0905",
159      "name": "Kimi K2 Instruct 0905",
160      "cost_per_1m_in": 0,
161      "cost_per_1m_out": 0,
162      "cost_per_1m_in_cached": 0,
163      "cost_per_1m_out_cached": 0,
164      "context_window": 262144,
165      "default_max_tokens": 26214,
166      "can_reason": false,
167      "supports_attachments": false,
168      "options": {}
169    },
170    {
171      "id": "hf:moonshotai/Kimi-K2-Thinking",
172      "name": "Kimi K2 Thinking",
173      "cost_per_1m_in": 0,
174      "cost_per_1m_out": 0,
175      "cost_per_1m_in_cached": 0,
176      "cost_per_1m_out_cached": 0,
177      "context_window": 262144,
178      "default_max_tokens": 32768,
179      "can_reason": true,
180      "reasoning_levels": [
181        "low",
182        "medium",
183        "high"
184      ],
185      "default_reasoning_effort": "medium",
186      "supports_attachments": false,
187      "options": {}
188    },
189    {
190      "id": "hf:meta-llama/Llama-3.1-405B-Instruct",
191      "name": "Llama 3.1 405B Instruct",
192      "cost_per_1m_in": 0,
193      "cost_per_1m_out": 0,
194      "cost_per_1m_in_cached": 0,
195      "cost_per_1m_out_cached": 0,
196      "context_window": 131072,
197      "default_max_tokens": 13107,
198      "can_reason": false,
199      "supports_attachments": false,
200      "options": {}
201    },
202    {
203      "id": "hf:meta-llama/Llama-3.1-70B-Instruct",
204      "name": "Llama 3.1 70B Instruct",
205      "cost_per_1m_in": 0,
206      "cost_per_1m_out": 0,
207      "cost_per_1m_in_cached": 0,
208      "cost_per_1m_out_cached": 0,
209      "context_window": 131072,
210      "default_max_tokens": 13107,
211      "can_reason": false,
212      "supports_attachments": false,
213      "options": {}
214    },
215    {
216      "id": "hf:meta-llama/Llama-3.1-8B-Instruct",
217      "name": "Llama 3.1 8B Instruct",
218      "cost_per_1m_in": 0,
219      "cost_per_1m_out": 0,
220      "cost_per_1m_in_cached": 0,
221      "cost_per_1m_out_cached": 0,
222      "context_window": 131072,
223      "default_max_tokens": 13107,
224      "can_reason": false,
225      "supports_attachments": false,
226      "options": {}
227    },
228    {
229      "id": "hf:meta-llama/Llama-3.3-70B-Instruct",
230      "name": "Llama 3.3 70B Instruct",
231      "cost_per_1m_in": 0,
232      "cost_per_1m_out": 0,
233      "cost_per_1m_in_cached": 0,
234      "cost_per_1m_out_cached": 0,
235      "context_window": 131072,
236      "default_max_tokens": 13107,
237      "can_reason": false,
238      "supports_attachments": false,
239      "options": {}
240    },
241    {
242      "id": "hf:meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
243      "name": "Llama 4 Maverick 17B 128E Instruct FP8",
244      "cost_per_1m_in": 0,
245      "cost_per_1m_out": 0,
246      "cost_per_1m_in_cached": 0,
247      "cost_per_1m_out_cached": 0,
248      "context_window": 536576,
249      "default_max_tokens": 53657,
250      "can_reason": false,
251      "supports_attachments": true,
252      "options": {}
253    },
254    {
255      "id": "hf:meta-llama/Llama-4-Scout-17B-16E-Instruct",
256      "name": "Llama 4 Scout 17B 16E Instruct",
257      "cost_per_1m_in": 0,
258      "cost_per_1m_out": 0,
259      "cost_per_1m_in_cached": 0,
260      "cost_per_1m_out_cached": 0,
261      "context_window": 335872,
262      "default_max_tokens": 33587,
263      "can_reason": false,
264      "supports_attachments": true,
265      "options": {}
266    },
267    {
268      "id": "hf:MiniMaxAI/MiniMax-M2",
269      "name": "MiniMax M2",
270      "cost_per_1m_in": 0,
271      "cost_per_1m_out": 0,
272      "cost_per_1m_in_cached": 0,
273      "cost_per_1m_out_cached": 0,
274      "context_window": 196608,
275      "default_max_tokens": 19660,
276      "can_reason": true,
277      "reasoning_levels": [
278        "low",
279        "medium",
280        "high"
281      ],
282      "default_reasoning_effort": "medium",
283      "supports_attachments": false,
284      "options": {}
285    },
286    {
287      "id": "hf:Qwen/Qwen3-235B-A22B-Instruct-2507",
288      "name": "Qwen3 235B A22B Instruct 2507",
289      "cost_per_1m_in": 0,
290      "cost_per_1m_out": 0,
291      "cost_per_1m_in_cached": 0,
292      "cost_per_1m_out_cached": 0,
293      "context_window": 262144,
294      "default_max_tokens": 26214,
295      "can_reason": true,
296      "reasoning_levels": [
297        "low",
298        "medium",
299        "high"
300      ],
301      "default_reasoning_effort": "medium",
302      "supports_attachments": false,
303      "options": {}
304    },
305    {
306      "id": "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
307      "name": "Qwen3 235B A22B Thinking 2507",
308      "cost_per_1m_in": 0,
309      "cost_per_1m_out": 0,
310      "cost_per_1m_in_cached": 0,
311      "cost_per_1m_out_cached": 0,
312      "context_window": 262144,
313      "default_max_tokens": 26214,
314      "can_reason": true,
315      "reasoning_levels": [
316        "low",
317        "medium",
318        "high"
319      ],
320      "default_reasoning_effort": "medium",
321      "supports_attachments": false,
322      "options": {}
323    },
324    {
325      "id": "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
326      "name": "Qwen3 Coder 480B A35B Instruct",
327      "cost_per_1m_in": 0,
328      "cost_per_1m_out": 0,
329      "cost_per_1m_in_cached": 0,
330      "cost_per_1m_out_cached": 0,
331      "context_window": 262144,
332      "default_max_tokens": 26214,
333      "can_reason": false,
334      "supports_attachments": false,
335      "options": {}
336    },
337    {
338      "id": "hf:Qwen/Qwen3-VL-235B-A22B-Instruct",
339      "name": "Qwen3 VL 235B A22B Instruct",
340      "cost_per_1m_in": 0,
341      "cost_per_1m_out": 0,
342      "cost_per_1m_in_cached": 0,
343      "cost_per_1m_out_cached": 0,
344      "context_window": 256000,
345      "default_max_tokens": 25600,
346      "can_reason": false,
347      "supports_attachments": true,
348      "options": {}
349    },
350    {
351      "id": "hf:openai/gpt-oss-120b",
352      "name": "gpt oss 120b",
353      "cost_per_1m_in": 0,
354      "cost_per_1m_out": 0,
355      "cost_per_1m_in_cached": 0,
356      "cost_per_1m_out_cached": 0,
357      "context_window": 131072,
358      "default_max_tokens": 13107,
359      "can_reason": false,
360      "supports_attachments": false,
361      "options": {}
362    }
363  ]
364}