synthetic.json

  1{
  2  "name": "Synthetic",
  3  "id": "synthetic",
  4  "api_key": "$SYNTHETIC_API_KEY",
  5  "api_endpoint": "https://api.synthetic.new/openai/v1",
  6  "type": "openai-compat",
  7  "default_large_model_id": "hf:zai-org/GLM-4.7",
  8  "default_small_model_id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
  9  "models": [
 10    {
 11      "id": "hf:deepseek-ai/DeepSeek-R1-0528",
 12      "name": "DeepSeek R1 0528",
 13      "cost_per_1m_in": 3,
 14      "cost_per_1m_out": 8,
 15      "cost_per_1m_in_cached": 3,
 16      "cost_per_1m_out_cached": 3,
 17      "context_window": 131072,
 18      "default_max_tokens": 13107,
 19      "can_reason": true,
 20      "reasoning_levels": [
 21        "low",
 22        "medium",
 23        "high"
 24      ],
 25      "default_reasoning_effort": "medium",
 26      "supports_attachments": false,
 27      "options": {}
 28    },
 29    {
 30      "id": "hf:deepseek-ai/DeepSeek-V3",
 31      "name": "DeepSeek V3",
 32      "cost_per_1m_in": 1.25,
 33      "cost_per_1m_out": 1.25,
 34      "cost_per_1m_in_cached": 1.25,
 35      "cost_per_1m_out_cached": 1.25,
 36      "context_window": 131072,
 37      "default_max_tokens": 13107,
 38      "can_reason": false,
 39      "supports_attachments": false,
 40      "options": {}
 41    },
 42    {
 43      "id": "hf:deepseek-ai/DeepSeek-V3-0324",
 44      "name": "DeepSeek V3 0324",
 45      "cost_per_1m_in": 1.2,
 46      "cost_per_1m_out": 1.2,
 47      "cost_per_1m_in_cached": 1.2,
 48      "cost_per_1m_out_cached": 1.2,
 49      "context_window": 131072,
 50      "default_max_tokens": 13107,
 51      "can_reason": false,
 52      "supports_attachments": false,
 53      "options": {}
 54    },
 55    {
 56      "id": "hf:deepseek-ai/DeepSeek-V3.1",
 57      "name": "DeepSeek V3.1",
 58      "cost_per_1m_in": 0.56,
 59      "cost_per_1m_out": 1.68,
 60      "cost_per_1m_in_cached": 0.56,
 61      "cost_per_1m_out_cached": 0.56,
 62      "context_window": 131072,
 63      "default_max_tokens": 13107,
 64      "can_reason": true,
 65      "reasoning_levels": [
 66        "low",
 67        "medium",
 68        "high"
 69      ],
 70      "default_reasoning_effort": "medium",
 71      "supports_attachments": false,
 72      "options": {}
 73    },
 74    {
 75      "id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
 76      "name": "DeepSeek V3.1 Terminus",
 77      "cost_per_1m_in": 1.2,
 78      "cost_per_1m_out": 1.2,
 79      "cost_per_1m_in_cached": 1.2,
 80      "cost_per_1m_out_cached": 1.2,
 81      "context_window": 131072,
 82      "default_max_tokens": 13107,
 83      "can_reason": true,
 84      "reasoning_levels": [
 85        "low",
 86        "medium",
 87        "high"
 88      ],
 89      "default_reasoning_effort": "medium",
 90      "supports_attachments": false,
 91      "options": {}
 92    },
 93    {
 94      "id": "hf:deepseek-ai/DeepSeek-V3.2",
 95      "name": "DeepSeek V3.2",
 96      "cost_per_1m_in": 0.56,
 97      "cost_per_1m_out": 1.68,
 98      "cost_per_1m_in_cached": 0.56,
 99      "cost_per_1m_out_cached": 0.56,
100      "context_window": 162816,
101      "default_max_tokens": 16281,
102      "can_reason": true,
103      "reasoning_levels": [
104        "low",
105        "medium",
106        "high"
107      ],
108      "default_reasoning_effort": "medium",
109      "supports_attachments": false,
110      "options": {}
111    },
112    {
113      "id": "hf:zai-org/GLM-4.7",
114      "name": "GLM 4.7",
115      "cost_per_1m_in": 0.55,
116      "cost_per_1m_out": 2.19,
117      "cost_per_1m_in_cached": 0.55,
118      "cost_per_1m_out_cached": 0.55,
119      "context_window": 202752,
120      "default_max_tokens": 20275,
121      "can_reason": true,
122      "reasoning_levels": [
123        "low",
124        "medium",
125        "high"
126      ],
127      "default_reasoning_effort": "medium",
128      "supports_attachments": false,
129      "options": {}
130    },
131    {
132      "id": "hf:moonshotai/Kimi-K2-Instruct-0905",
133      "name": "Kimi K2 Instruct 0905",
134      "cost_per_1m_in": 1.2,
135      "cost_per_1m_out": 1.2,
136      "cost_per_1m_in_cached": 1.2,
137      "cost_per_1m_out_cached": 1.2,
138      "context_window": 262144,
139      "default_max_tokens": 26214,
140      "can_reason": false,
141      "supports_attachments": false,
142      "options": {}
143    },
144    {
145      "id": "hf:moonshotai/Kimi-K2-Thinking",
146      "name": "Kimi K2 Thinking",
147      "cost_per_1m_in": 0.6,
148      "cost_per_1m_out": 2.5,
149      "cost_per_1m_in_cached": 0.6,
150      "cost_per_1m_out_cached": 0.6,
151      "context_window": 262144,
152      "default_max_tokens": 26214,
153      "can_reason": true,
154      "reasoning_levels": [
155        "low",
156        "medium",
157        "high"
158      ],
159      "default_reasoning_effort": "medium",
160      "supports_attachments": false,
161      "options": {}
162    },
163    {
164      "id": "hf:moonshotai/Kimi-K2.5",
165      "name": "Kimi K2.5",
166      "cost_per_1m_in": 0.55,
167      "cost_per_1m_out": 2.19,
168      "cost_per_1m_in_cached": 0.55,
169      "cost_per_1m_out_cached": 0.55,
170      "context_window": 262144,
171      "default_max_tokens": 32768,
172      "can_reason": true,
173      "reasoning_levels": [
174        "low",
175        "medium",
176        "high"
177      ],
178      "default_reasoning_effort": "medium",
179      "supports_attachments": true,
180      "options": {}
181    },
182    {
183      "id": "hf:meta-llama/Llama-3.3-70B-Instruct",
184      "name": "Llama 3.3 70B Instruct",
185      "cost_per_1m_in": 0.9,
186      "cost_per_1m_out": 0.9,
187      "cost_per_1m_in_cached": 0.9,
188      "cost_per_1m_out_cached": 0.9,
189      "context_window": 131072,
190      "default_max_tokens": 13107,
191      "can_reason": false,
192      "supports_attachments": false,
193      "options": {}
194    },
195    {
196      "id": "hf:MiniMaxAI/MiniMax-M2.1",
197      "name": "MiniMax M2.1",
198      "cost_per_1m_in": 0.3,
199      "cost_per_1m_out": 1.2,
200      "cost_per_1m_in_cached": 0.3,
201      "cost_per_1m_out_cached": 0.3,
202      "context_window": 196608,
203      "default_max_tokens": 19660,
204      "can_reason": true,
205      "reasoning_levels": [
206        "low",
207        "medium",
208        "high"
209      ],
210      "default_reasoning_effort": "medium",
211      "supports_attachments": false,
212      "options": {}
213    },
214    {
215      "id": "hf:Qwen/Qwen3-235B-A22B-Instruct-2507",
216      "name": "Qwen3 235B A22B Instruct 2507",
217      "cost_per_1m_in": 0.22,
218      "cost_per_1m_out": 0.88,
219      "cost_per_1m_in_cached": 0.22,
220      "cost_per_1m_out_cached": 0.22,
221      "context_window": 262144,
222      "default_max_tokens": 26214,
223      "can_reason": true,
224      "reasoning_levels": [
225        "low",
226        "medium",
227        "high"
228      ],
229      "default_reasoning_effort": "medium",
230      "supports_attachments": false,
231      "options": {}
232    },
233    {
234      "id": "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
235      "name": "Qwen3 235B A22B Thinking 2507",
236      "cost_per_1m_in": 0.65,
237      "cost_per_1m_out": 3,
238      "cost_per_1m_in_cached": 0.65,
239      "cost_per_1m_out_cached": 0.65,
240      "context_window": 262144,
241      "default_max_tokens": 26214,
242      "can_reason": true,
243      "reasoning_levels": [
244        "low",
245        "medium",
246        "high"
247      ],
248      "default_reasoning_effort": "medium",
249      "supports_attachments": false,
250      "options": {}
251    },
252    {
253      "id": "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
254      "name": "Qwen3 Coder 480B A35B Instruct",
255      "cost_per_1m_in": 0.45,
256      "cost_per_1m_out": 1.8,
257      "cost_per_1m_in_cached": 0.45,
258      "cost_per_1m_out_cached": 0.45,
259      "context_window": 262144,
260      "default_max_tokens": 26214,
261      "can_reason": false,
262      "supports_attachments": false,
263      "options": {}
264    },
265    {
266      "id": "hf:Qwen/Qwen3-VL-235B-A22B-Instruct",
267      "name": "Qwen3 VL 235B A22B Instruct",
268      "cost_per_1m_in": 0.22,
269      "cost_per_1m_out": 0.88,
270      "cost_per_1m_in_cached": 0.22,
271      "cost_per_1m_out_cached": 0.22,
272      "context_window": 256000,
273      "default_max_tokens": 25600,
274      "can_reason": false,
275      "supports_attachments": true,
276      "options": {}
277    },
278    {
279      "id": "hf:openai/gpt-oss-120b",
280      "name": "gpt oss 120b",
281      "cost_per_1m_in": 0.1,
282      "cost_per_1m_out": 0.1,
283      "cost_per_1m_in_cached": 0.1,
284      "cost_per_1m_out_cached": 0.1,
285      "context_window": 131072,
286      "default_max_tokens": 13107,
287      "can_reason": true,
288      "reasoning_levels": [
289        "low",
290        "medium",
291        "high"
292      ],
293      "default_reasoning_effort": "medium",
294      "supports_attachments": false,
295      "options": {}
296    }
297  ]
298}