synthetic.json

  1{
  2  "name": "Synthetic",
  3  "id": "synthetic",
  4  "api_key": "$SYNTHETIC_API_KEY",
  5  "api_endpoint": "https://api.synthetic.new/openai/v1",
  6  "type": "openai-compat",
  7  "default_large_model_id": "hf:zai-org/GLM-4.7",
  8  "default_small_model_id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
  9  "models": [
 10    {
 11      "id": "hf:deepseek-ai/DeepSeek-R1-0528",
 12      "name": "DeepSeek R1 0528",
 13      "cost_per_1m_in": 3,
 14      "cost_per_1m_out": 8,
 15      "cost_per_1m_in_cached": 3,
 16      "cost_per_1m_out_cached": 3,
 17      "context_window": 131072,
 18      "default_max_tokens": 13107,
 19      "can_reason": true,
 20      "reasoning_levels": [
 21        "low",
 22        "medium",
 23        "high"
 24      ],
 25      "default_reasoning_effort": "medium",
 26      "supports_attachments": false,
 27      "options": {}
 28    },
 29    {
 30      "id": "hf:deepseek-ai/DeepSeek-V3",
 31      "name": "DeepSeek V3",
 32      "cost_per_1m_in": 1.25,
 33      "cost_per_1m_out": 1.25,
 34      "cost_per_1m_in_cached": 1.25,
 35      "cost_per_1m_out_cached": 1.25,
 36      "context_window": 131072,
 37      "default_max_tokens": 13107,
 38      "can_reason": false,
 39      "supports_attachments": false,
 40      "options": {}
 41    },
 42    {
 43      "id": "hf:deepseek-ai/DeepSeek-V3-0324",
 44      "name": "DeepSeek V3 0324",
 45      "cost_per_1m_in": 1.2,
 46      "cost_per_1m_out": 1.2,
 47      "cost_per_1m_in_cached": 1.2,
 48      "cost_per_1m_out_cached": 1.2,
 49      "context_window": 131072,
 50      "default_max_tokens": 13107,
 51      "can_reason": false,
 52      "supports_attachments": false,
 53      "options": {}
 54    },
 55    {
 56      "id": "hf:deepseek-ai/DeepSeek-V3.1",
 57      "name": "DeepSeek V3.1",
 58      "cost_per_1m_in": 0.56,
 59      "cost_per_1m_out": 1.68,
 60      "cost_per_1m_in_cached": 0.56,
 61      "cost_per_1m_out_cached": 0.56,
 62      "context_window": 131072,
 63      "default_max_tokens": 13107,
 64      "can_reason": true,
 65      "reasoning_levels": [
 66        "low",
 67        "medium",
 68        "high"
 69      ],
 70      "default_reasoning_effort": "medium",
 71      "supports_attachments": false,
 72      "options": {}
 73    },
 74    {
 75      "id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
 76      "name": "DeepSeek V3.1 Terminus",
 77      "cost_per_1m_in": 1.2,
 78      "cost_per_1m_out": 1.2,
 79      "cost_per_1m_in_cached": 1.2,
 80      "cost_per_1m_out_cached": 1.2,
 81      "context_window": 131072,
 82      "default_max_tokens": 13107,
 83      "can_reason": true,
 84      "reasoning_levels": [
 85        "low",
 86        "medium",
 87        "high"
 88      ],
 89      "default_reasoning_effort": "medium",
 90      "supports_attachments": false,
 91      "options": {}
 92    },
 93    {
 94      "id": "hf:deepseek-ai/DeepSeek-V3.2",
 95      "name": "DeepSeek V3.2",
 96      "cost_per_1m_in": 0.56,
 97      "cost_per_1m_out": 1.68,
 98      "cost_per_1m_in_cached": 0.56,
 99      "cost_per_1m_out_cached": 0.56,
100      "context_window": 162816,
101      "default_max_tokens": 16281,
102      "can_reason": true,
103      "reasoning_levels": [
104        "low",
105        "medium",
106        "high"
107      ],
108      "default_reasoning_effort": "medium",
109      "supports_attachments": false,
110      "options": {}
111    },
112    {
113      "id": "hf:zai-org/GLM-4.7",
114      "name": "GLM 4.7",
115      "cost_per_1m_in": 0.55,
116      "cost_per_1m_out": 2.19,
117      "cost_per_1m_in_cached": 0.55,
118      "cost_per_1m_out_cached": 0.55,
119      "context_window": 202752,
120      "default_max_tokens": 20275,
121      "can_reason": true,
122      "reasoning_levels": [
123        "low",
124        "medium",
125        "high"
126      ],
127      "default_reasoning_effort": "medium",
128      "supports_attachments": false,
129      "options": {}
130    },
131    {
132      "id": "hf:moonshotai/Kimi-K2-Instruct-0905",
133      "name": "Kimi K2 Instruct 0905",
134      "cost_per_1m_in": 1.2,
135      "cost_per_1m_out": 1.2,
136      "cost_per_1m_in_cached": 1.2,
137      "cost_per_1m_out_cached": 1.2,
138      "context_window": 262144,
139      "default_max_tokens": 26214,
140      "can_reason": false,
141      "supports_attachments": false,
142      "options": {}
143    },
144    {
145      "id": "hf:moonshotai/Kimi-K2-Thinking",
146      "name": "Kimi K2 Thinking",
147      "cost_per_1m_in": 0.6,
148      "cost_per_1m_out": 2.5,
149      "cost_per_1m_in_cached": 0.6,
150      "cost_per_1m_out_cached": 0.6,
151      "context_window": 262144,
152      "default_max_tokens": 26214,
153      "can_reason": true,
154      "reasoning_levels": [
155        "low",
156        "medium",
157        "high"
158      ],
159      "default_reasoning_effort": "medium",
160      "supports_attachments": false,
161      "options": {}
162    },
163    {
164      "id": "hf:moonshotai/Kimi-K2.5",
165      "name": "Kimi K2.5",
166      "cost_per_1m_in": 0.55,
167      "cost_per_1m_out": 2.19,
168      "cost_per_1m_in_cached": 0.55,
169      "cost_per_1m_out_cached": 0.55,
170      "context_window": 262144,
171      "default_max_tokens": 32768,
172      "can_reason": true,
173      "reasoning_levels": [
174        "low",
175        "medium",
176        "high"
177      ],
178      "default_reasoning_effort": "medium",
179      "supports_attachments": true,
180      "options": {}
181    },
182    {
183      "id": "hf:nvidia/Kimi-K2.5-NVFP4",
184      "name": "Kimi K2.5 NVFP4",
185      "cost_per_1m_in": 0.55,
186      "cost_per_1m_out": 2.19,
187      "cost_per_1m_in_cached": 0.55,
188      "cost_per_1m_out_cached": 0.55,
189      "context_window": 262144,
190      "default_max_tokens": 32768,
191      "can_reason": true,
192      "reasoning_levels": [
193        "low",
194        "medium",
195        "high"
196      ],
197      "default_reasoning_effort": "medium",
198      "supports_attachments": true,
199      "options": {}
200    },
201    {
202      "id": "hf:meta-llama/Llama-3.3-70B-Instruct",
203      "name": "Llama 3.3 70B Instruct",
204      "cost_per_1m_in": 0.9,
205      "cost_per_1m_out": 0.9,
206      "cost_per_1m_in_cached": 0.9,
207      "cost_per_1m_out_cached": 0.9,
208      "context_window": 131072,
209      "default_max_tokens": 13107,
210      "can_reason": false,
211      "supports_attachments": false,
212      "options": {}
213    },
214    {
215      "id": "hf:MiniMaxAI/MiniMax-M2.1",
216      "name": "MiniMax M2.1",
217      "cost_per_1m_in": 0.3,
218      "cost_per_1m_out": 1.2,
219      "cost_per_1m_in_cached": 0.3,
220      "cost_per_1m_out_cached": 0.3,
221      "context_window": 196608,
222      "default_max_tokens": 19660,
223      "can_reason": true,
224      "reasoning_levels": [
225        "low",
226        "medium",
227        "high"
228      ],
229      "default_reasoning_effort": "medium",
230      "supports_attachments": false,
231      "options": {}
232    },
233    {
234      "id": "hf:Qwen/Qwen3-235B-A22B-Instruct-2507",
235      "name": "Qwen3 235B A22B Instruct 2507",
236      "cost_per_1m_in": 0.22,
237      "cost_per_1m_out": 0.88,
238      "cost_per_1m_in_cached": 0.22,
239      "cost_per_1m_out_cached": 0.22,
240      "context_window": 262144,
241      "default_max_tokens": 26214,
242      "can_reason": true,
243      "reasoning_levels": [
244        "low",
245        "medium",
246        "high"
247      ],
248      "default_reasoning_effort": "medium",
249      "supports_attachments": false,
250      "options": {}
251    },
252    {
253      "id": "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
254      "name": "Qwen3 235B A22B Thinking 2507",
255      "cost_per_1m_in": 0.65,
256      "cost_per_1m_out": 3,
257      "cost_per_1m_in_cached": 0.65,
258      "cost_per_1m_out_cached": 0.65,
259      "context_window": 262144,
260      "default_max_tokens": 26214,
261      "can_reason": true,
262      "reasoning_levels": [
263        "low",
264        "medium",
265        "high"
266      ],
267      "default_reasoning_effort": "medium",
268      "supports_attachments": false,
269      "options": {}
270    },
271    {
272      "id": "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
273      "name": "Qwen3 Coder 480B A35B Instruct",
274      "cost_per_1m_in": 0.45,
275      "cost_per_1m_out": 1.8,
276      "cost_per_1m_in_cached": 0.45,
277      "cost_per_1m_out_cached": 0.45,
278      "context_window": 262144,
279      "default_max_tokens": 26214,
280      "can_reason": false,
281      "supports_attachments": false,
282      "options": {}
283    },
284    {
285      "id": "hf:Qwen/Qwen3-VL-235B-A22B-Instruct",
286      "name": "Qwen3 VL 235B A22B Instruct",
287      "cost_per_1m_in": 0.22,
288      "cost_per_1m_out": 0.88,
289      "cost_per_1m_in_cached": 0.22,
290      "cost_per_1m_out_cached": 0.22,
291      "context_window": 256000,
292      "default_max_tokens": 25600,
293      "can_reason": false,
294      "supports_attachments": true,
295      "options": {}
296    },
297    {
298      "id": "hf:openai/gpt-oss-120b",
299      "name": "gpt oss 120b",
300      "cost_per_1m_in": 0.1,
301      "cost_per_1m_out": 0.1,
302      "cost_per_1m_in_cached": 0.1,
303      "cost_per_1m_out_cached": 0.1,
304      "context_window": 131072,
305      "default_max_tokens": 13107,
306      "can_reason": true,
307      "reasoning_levels": [
308        "low",
309        "medium",
310        "high"
311      ],
312      "default_reasoning_effort": "medium",
313      "supports_attachments": false,
314      "options": {}
315    }
316  ]
317}