synthetic.json

  1{
  2  "name": "Synthetic",
  3  "id": "synthetic",
  4  "api_key": "$SYNTHETIC_API_KEY",
  5  "api_endpoint": "https://api.synthetic.new/openai/v1",
  6  "type": "openai-compat",
  7  "default_large_model_id": "hf:zai-org/GLM-4.7",
  8  "default_small_model_id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
  9  "models": [
 10    {
 11      "id": "hf:deepseek-ai/DeepSeek-R1-0528",
 12      "name": "DeepSeek R1 0528",
 13      "cost_per_1m_in": 3,
 14      "cost_per_1m_out": 8,
 15      "cost_per_1m_in_cached": 3,
 16      "cost_per_1m_out_cached": 3,
 17      "context_window": 131072,
 18      "default_max_tokens": 13107,
 19      "can_reason": true,
 20      "reasoning_levels": [
 21        "low",
 22        "medium",
 23        "high"
 24      ],
 25      "default_reasoning_effort": "medium",
 26      "supports_attachments": false,
 27      "options": {}
 28    },
 29    {
 30      "id": "hf:deepseek-ai/DeepSeek-V3",
 31      "name": "DeepSeek V3",
 32      "cost_per_1m_in": 1.25,
 33      "cost_per_1m_out": 1.25,
 34      "cost_per_1m_in_cached": 1.25,
 35      "cost_per_1m_out_cached": 1.25,
 36      "context_window": 131072,
 37      "default_max_tokens": 13107,
 38      "can_reason": false,
 39      "supports_attachments": false,
 40      "options": {}
 41    },
 42    {
 43      "id": "hf:deepseek-ai/DeepSeek-V3-0324",
 44      "name": "DeepSeek V3 0324",
 45      "cost_per_1m_in": 1.2,
 46      "cost_per_1m_out": 1.2,
 47      "cost_per_1m_in_cached": 1.2,
 48      "cost_per_1m_out_cached": 1.2,
 49      "context_window": 131072,
 50      "default_max_tokens": 13107,
 51      "can_reason": false,
 52      "supports_attachments": false,
 53      "options": {}
 54    },
 55    {
 56      "id": "hf:deepseek-ai/DeepSeek-V3.1",
 57      "name": "DeepSeek V3.1",
 58      "cost_per_1m_in": 0.56,
 59      "cost_per_1m_out": 1.68,
 60      "cost_per_1m_in_cached": 0.56,
 61      "cost_per_1m_out_cached": 0.56,
 62      "context_window": 131072,
 63      "default_max_tokens": 13107,
 64      "can_reason": true,
 65      "reasoning_levels": [
 66        "low",
 67        "medium",
 68        "high"
 69      ],
 70      "default_reasoning_effort": "medium",
 71      "supports_attachments": false,
 72      "options": {}
 73    },
 74    {
 75      "id": "hf:deepseek-ai/DeepSeek-V3.1-Terminus",
 76      "name": "DeepSeek V3.1 Terminus",
 77      "cost_per_1m_in": 1.2,
 78      "cost_per_1m_out": 1.2,
 79      "cost_per_1m_in_cached": 1.2,
 80      "cost_per_1m_out_cached": 1.2,
 81      "context_window": 131072,
 82      "default_max_tokens": 13107,
 83      "can_reason": true,
 84      "reasoning_levels": [
 85        "low",
 86        "medium",
 87        "high"
 88      ],
 89      "default_reasoning_effort": "medium",
 90      "supports_attachments": false,
 91      "options": {}
 92    },
 93    {
 94      "id": "hf:deepseek-ai/DeepSeek-V3.2",
 95      "name": "DeepSeek V3.2",
 96      "cost_per_1m_in": 0.56,
 97      "cost_per_1m_out": 1.68,
 98      "cost_per_1m_in_cached": 0.56,
 99      "cost_per_1m_out_cached": 0.56,
100      "context_window": 162816,
101      "default_max_tokens": 16281,
102      "can_reason": true,
103      "reasoning_levels": [
104        "low",
105        "medium",
106        "high"
107      ],
108      "default_reasoning_effort": "medium",
109      "supports_attachments": false,
110      "options": {}
111    },
112    {
113      "id": "hf:zai-org/GLM-4.5",
114      "name": "GLM 4.5",
115      "cost_per_1m_in": 0.55,
116      "cost_per_1m_out": 2.19,
117      "cost_per_1m_in_cached": 0.55,
118      "cost_per_1m_out_cached": 0.55,
119      "context_window": 131072,
120      "default_max_tokens": 13107,
121      "can_reason": false,
122      "supports_attachments": false,
123      "options": {}
124    },
125    {
126      "id": "hf:zai-org/GLM-4.7",
127      "name": "GLM 4.7",
128      "cost_per_1m_in": 0.55,
129      "cost_per_1m_out": 2.19,
130      "cost_per_1m_in_cached": 0.55,
131      "cost_per_1m_out_cached": 0.55,
132      "context_window": 202752,
133      "default_max_tokens": 20275,
134      "can_reason": true,
135      "reasoning_levels": [
136        "low",
137        "medium",
138        "high"
139      ],
140      "default_reasoning_effort": "medium",
141      "supports_attachments": false,
142      "options": {}
143    },
144    {
145      "id": "hf:moonshotai/Kimi-K2-Instruct-0905",
146      "name": "Kimi K2 Instruct 0905",
147      "cost_per_1m_in": 1.2,
148      "cost_per_1m_out": 1.2,
149      "cost_per_1m_in_cached": 1.2,
150      "cost_per_1m_out_cached": 1.2,
151      "context_window": 262144,
152      "default_max_tokens": 26214,
153      "can_reason": false,
154      "supports_attachments": false,
155      "options": {}
156    },
157    {
158      "id": "hf:moonshotai/Kimi-K2-Thinking",
159      "name": "Kimi K2 Thinking",
160      "cost_per_1m_in": 0.55,
161      "cost_per_1m_out": 2.19,
162      "cost_per_1m_in_cached": 0.55,
163      "cost_per_1m_out_cached": 0.55,
164      "context_window": 262144,
165      "default_max_tokens": 32768,
166      "can_reason": true,
167      "reasoning_levels": [
168        "low",
169        "medium",
170        "high"
171      ],
172      "default_reasoning_effort": "medium",
173      "supports_attachments": false,
174      "options": {}
175    },
176    {
177      "id": "hf:meta-llama/Llama-3.3-70B-Instruct",
178      "name": "Llama 3.3 70B Instruct",
179      "cost_per_1m_in": 0.9,
180      "cost_per_1m_out": 0.9,
181      "cost_per_1m_in_cached": 0.9,
182      "cost_per_1m_out_cached": 0.9,
183      "context_window": 131072,
184      "default_max_tokens": 13107,
185      "can_reason": false,
186      "supports_attachments": false,
187      "options": {}
188    },
189    {
190      "id": "hf:meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
191      "name": "Llama 4 Maverick 17B 128E Instruct FP8",
192      "cost_per_1m_in": 0.22,
193      "cost_per_1m_out": 0.88,
194      "cost_per_1m_in_cached": 0.22,
195      "cost_per_1m_out_cached": 0.22,
196      "context_window": 536576,
197      "default_max_tokens": 53657,
198      "can_reason": false,
199      "supports_attachments": true,
200      "options": {}
201    },
202    {
203      "id": "hf:MiniMaxAI/MiniMax-M2.1",
204      "name": "MiniMax M2.1",
205      "cost_per_1m_in": 0.55,
206      "cost_per_1m_out": 2.19,
207      "cost_per_1m_in_cached": 0.55,
208      "cost_per_1m_out_cached": 0.55,
209      "context_window": 196608,
210      "default_max_tokens": 19660,
211      "can_reason": true,
212      "reasoning_levels": [
213        "low",
214        "medium",
215        "high"
216      ],
217      "default_reasoning_effort": "medium",
218      "supports_attachments": false,
219      "options": {}
220    },
221    {
222      "id": "hf:Qwen/Qwen3-235B-A22B-Instruct-2507",
223      "name": "Qwen3 235B A22B Instruct 2507",
224      "cost_per_1m_in": 0.22,
225      "cost_per_1m_out": 0.88,
226      "cost_per_1m_in_cached": 0.22,
227      "cost_per_1m_out_cached": 0.22,
228      "context_window": 262144,
229      "default_max_tokens": 26214,
230      "can_reason": true,
231      "reasoning_levels": [
232        "low",
233        "medium",
234        "high"
235      ],
236      "default_reasoning_effort": "medium",
237      "supports_attachments": false,
238      "options": {}
239    },
240    {
241      "id": "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
242      "name": "Qwen3 235B A22B Thinking 2507",
243      "cost_per_1m_in": 0.65,
244      "cost_per_1m_out": 3,
245      "cost_per_1m_in_cached": 0.65,
246      "cost_per_1m_out_cached": 0.65,
247      "context_window": 262144,
248      "default_max_tokens": 26214,
249      "can_reason": true,
250      "reasoning_levels": [
251        "low",
252        "medium",
253        "high"
254      ],
255      "default_reasoning_effort": "medium",
256      "supports_attachments": false,
257      "options": {}
258    },
259    {
260      "id": "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
261      "name": "Qwen3 Coder 480B A35B Instruct",
262      "cost_per_1m_in": 0.45,
263      "cost_per_1m_out": 1.8,
264      "cost_per_1m_in_cached": 0.45,
265      "cost_per_1m_out_cached": 0.45,
266      "context_window": 262144,
267      "default_max_tokens": 26214,
268      "can_reason": false,
269      "supports_attachments": false,
270      "options": {}
271    },
272    {
273      "id": "hf:Qwen/Qwen3-VL-235B-A22B-Instruct",
274      "name": "Qwen3 VL 235B A22B Instruct",
275      "cost_per_1m_in": 0.22,
276      "cost_per_1m_out": 0.88,
277      "cost_per_1m_in_cached": 0.22,
278      "cost_per_1m_out_cached": 0.22,
279      "context_window": 256000,
280      "default_max_tokens": 25600,
281      "can_reason": false,
282      "supports_attachments": true,
283      "options": {}
284    },
285    {
286      "id": "hf:openai/gpt-oss-120b",
287      "name": "gpt oss 120b",
288      "cost_per_1m_in": 0.1,
289      "cost_per_1m_out": 0.1,
290      "cost_per_1m_in_cached": 0.1,
291      "cost_per_1m_out_cached": 0.1,
292      "context_window": 131072,
293      "default_max_tokens": 13107,
294      "can_reason": false,
295      "supports_attachments": false,
296      "options": {}
297    }
298  ]
299}