synthetic.json

{
  "name": "Synthetic",
  "id": "synthetic",
  "api_key": "$SYNTHETIC_API_KEY",
  "api_endpoint": "https://api.synthetic.new/openai/v1",
  "type": "openai-compat",
  "default_large_model_id": "hf:zai-org/GLM-4.6",
  "default_small_model_id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
  "models": [
    {
      "id": "hf:deepseek-ai/DeepSeek-R1-0528",
      "name": "DeepSeek R1 0528",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-V3",
      "name": "DeepSeek V3",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-V3-0324",
      "name": "DeepSeek V3 0324",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-V3.1",
      "name": "DeepSeek V3.1",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
      "name": "DeepSeek V3.1 Terminus",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:deepseek-ai/DeepSeek-V3.2",
      "name": "DeepSeek V3.2",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 162816,
      "default_max_tokens": 16281,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:zai-org/GLM-4.5",
      "name": "GLM 4.5",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:zai-org/GLM-4.6",
      "name": "GLM 4.6",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 202752,
      "default_max_tokens": 20275,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:moonshotai/Kimi-K2-Instruct-0905",
      "name": "Kimi K2 Instruct 0905",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:moonshotai/Kimi-K2-Thinking",
      "name": "Kimi K2 Thinking",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:meta-llama/Llama-3.3-70B-Instruct",
      "name": "Llama 3.3 70B Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
      "name": "Llama 4 Maverick 17B 128E Instruct FP8",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 536576,
      "default_max_tokens": 53657,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "hf:MiniMaxAI/MiniMax-M2",
      "name": "MiniMax M2",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 196608,
      "default_max_tokens": 19660,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:Qwen/Qwen3-235B-A22B-Instruct-2507",
      "name": "Qwen3 235B A22B Instruct 2507",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
      "name": "Qwen3 235B A22B Thinking 2507",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
      "name": "Qwen3 Coder 480B A35B Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "hf:Qwen/Qwen3-VL-235B-A22B-Instruct",
      "name": "Qwen3 VL 235B A22B Instruct",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "hf:openai/gpt-oss-120b",
      "name": "gpt oss 120b",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    }
  ]
}
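
Because "type" is "openai-compat", any OpenAI-compatible client should be able to reach the configured "api_endpoint" directly once SYNTHETIC_API_KEY is set in the environment. The sketch below is a minimal smoke test under those assumptions: it uses the official openai Python SDK as a stand-in client (not part of this config), the endpoint and default_large_model_id taken from synthetic.json, and a placeholder prompt. The consuming tool normally performs the "$SYNTHETIC_API_KEY" expansion itself.

# check_synthetic.py -- hedged sketch, not the tool's own loader.
# Assumes: the `openai` Python SDK is installed and SYNTHETIC_API_KEY is exported.
import os

from openai import OpenAI

# api_endpoint and default_large_model_id are taken verbatim from synthetic.json.
client = OpenAI(
    base_url="https://api.synthetic.new/openai/v1",
    api_key=os.environ["SYNTHETIC_API_KEY"],
)

response = client.chat.completions.create(
    model="hf:zai-org/GLM-4.6",
    messages=[{"role": "user", "content": "Reply with a single word."}],
    max_tokens=20275,  # mirrors default_max_tokens for GLM 4.6 in the config
)
print(response.choices[0].message.content)

If this prints a completion, the endpoint, key, and model ID in the config are wired up correctly; swap in any other "id" from the "models" array to test a different entry.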