synthetic.json

  1{
  2  "name": "Synthetic",
  3  "id": "synthetic",
  4  "api_key": "$SYNTHETIC_API_KEY",
  5  "api_endpoint": "https://api.synthetic.new/openai/v1",
  6  "type": "openai-compat",
  7  "default_large_model_id": "hf:moonshotai/Kimi-K2.6",
  8  "default_small_model_id": "hf:deepseek-ai/DeepSeek-V3.2",
  9  "models": [
 10    {
 11      "id": "hf:deepseek-ai/DeepSeek-R1-0528",
 12      "name": "DeepSeek R1 0528",
 13      "cost_per_1m_in": 3,
 14      "cost_per_1m_out": 8,
 15      "cost_per_1m_in_cached": 3,
 16      "cost_per_1m_out_cached": 3,
 17      "context_window": 131072,
 18      "default_max_tokens": 13107,
 19      "can_reason": true,
 20      "reasoning_levels": [
 21        "low",
 22        "medium",
 23        "high"
 24      ],
 25      "default_reasoning_effort": "medium",
 26      "supports_attachments": false
 27    },
 28    {
 29      "id": "hf:deepseek-ai/DeepSeek-V3",
 30      "name": "DeepSeek V3",
 31      "cost_per_1m_in": 1.25,
 32      "cost_per_1m_out": 1.25,
 33      "cost_per_1m_in_cached": 1.25,
 34      "cost_per_1m_out_cached": 1.25,
 35      "context_window": 131072,
 36      "default_max_tokens": 13107,
 37      "can_reason": false,
 38      "supports_attachments": false
 39    },
 40    {
 41      "id": "hf:deepseek-ai/DeepSeek-V3.2",
 42      "name": "DeepSeek V3.2",
 43      "cost_per_1m_in": 0.56,
 44      "cost_per_1m_out": 1.68,
 45      "cost_per_1m_in_cached": 0.56,
 46      "cost_per_1m_out_cached": 0.56,
 47      "context_window": 162816,
 48      "default_max_tokens": 16281,
 49      "can_reason": true,
 50      "reasoning_levels": [
 51        "low",
 52        "medium",
 53        "high"
 54      ],
 55      "default_reasoning_effort": "medium",
 56      "supports_attachments": false
 57    },
 58    {
 59      "id": "hf:zai-org/GLM-4.7",
 60      "name": "GLM 4.7",
 61      "cost_per_1m_in": 0.45,
 62      "cost_per_1m_out": 2.19,
 63      "cost_per_1m_in_cached": 0.45,
 64      "cost_per_1m_out_cached": 0.45,
 65      "context_window": 202752,
 66      "default_max_tokens": 20275,
 67      "can_reason": true,
 68      "reasoning_levels": [
 69        "low",
 70        "medium",
 71        "high"
 72      ],
 73      "default_reasoning_effort": "medium",
 74      "supports_attachments": false
 75    },
 76    {
 77      "id": "hf:zai-org/GLM-4.7-Flash",
 78      "name": "GLM 4.7 Flash",
 79      "cost_per_1m_in": 0.1,
 80      "cost_per_1m_out": 0.5,
 81      "cost_per_1m_in_cached": 0.1,
 82      "cost_per_1m_out_cached": 0.1,
 83      "context_window": 196608,
 84      "default_max_tokens": 19660,
 85      "can_reason": true,
 86      "reasoning_levels": [
 87        "low",
 88        "medium",
 89        "high"
 90      ],
 91      "default_reasoning_effort": "medium",
 92      "supports_attachments": false
 93    },
 94    {
 95      "id": "hf:zai-org/GLM-5",
 96      "name": "GLM 5",
 97      "cost_per_1m_in": 1,
 98      "cost_per_1m_out": 3,
 99      "cost_per_1m_in_cached": 1,
100      "cost_per_1m_out_cached": 1,
101      "context_window": 196608,
102      "default_max_tokens": 19660,
103      "can_reason": true,
104      "reasoning_levels": [
105        "low",
106        "medium",
107        "high"
108      ],
109      "default_reasoning_effort": "medium",
110      "supports_attachments": false
111    },
112    {
113      "id": "hf:zai-org/GLM-5.1",
114      "name": "GLM 5.1",
115      "cost_per_1m_in": 1,
116      "cost_per_1m_out": 3,
117      "cost_per_1m_in_cached": 1,
118      "cost_per_1m_out_cached": 1,
119      "context_window": 196608,
120      "default_max_tokens": 19660,
121      "can_reason": true,
122      "reasoning_levels": [
123        "low",
124        "medium",
125        "high"
126      ],
127      "default_reasoning_effort": "medium",
128      "supports_attachments": false
129    },
130    {
131      "id": "hf:moonshotai/Kimi-K2.6",
132      "name": "Kimi K2.6",
133      "cost_per_1m_in": 0.95,
134      "cost_per_1m_out": 4,
135      "cost_per_1m_in_cached": 0.95,
136      "cost_per_1m_out_cached": 0.95,
137      "context_window": 262144,
138      "default_max_tokens": 32768,
139      "can_reason": false,
140      "supports_attachments": true
141    },
142    {
143      "id": "hf:meta-llama/Llama-3.3-70B-Instruct",
144      "name": "Llama 3.3 70B Instruct",
145      "cost_per_1m_in": 0.88,
146      "cost_per_1m_out": 0.88,
147      "cost_per_1m_in_cached": 0.88,
148      "cost_per_1m_out_cached": 0.88,
149      "context_window": 131072,
150      "default_max_tokens": 13107,
151      "can_reason": false,
152      "supports_attachments": false
153    },
154    {
155      "id": "hf:MiniMaxAI/MiniMax-M2.5",
156      "name": "MiniMax M2.5",
157      "cost_per_1m_in": 0.4,
158      "cost_per_1m_out": 2,
159      "cost_per_1m_in_cached": 0.4,
160      "cost_per_1m_out_cached": 0.4,
161      "context_window": 191488,
162      "default_max_tokens": 19148,
163      "can_reason": true,
164      "reasoning_levels": [
165        "low",
166        "medium",
167        "high"
168      ],
169      "default_reasoning_effort": "medium",
170      "supports_attachments": false
171    },
172    {
173      "id": "hf:nvidia/NVIDIA-Nemotron-3-Super-120B-A12B-NVFP4",
174      "name": "NVIDIA Nemotron 3 Super 120B A12B NVFP4",
175      "cost_per_1m_in": 0.3,
176      "cost_per_1m_out": 1,
177      "cost_per_1m_in_cached": 0.3,
178      "cost_per_1m_out_cached": 0.3,
179      "context_window": 262144,
180      "default_max_tokens": 32768,
181      "can_reason": true,
182      "reasoning_levels": [
183        "low",
184        "medium",
185        "high"
186      ],
187      "default_reasoning_effort": "medium",
188      "supports_attachments": false
189    },
190    {
191      "id": "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
192      "name": "Qwen3 235B A22B Thinking 2507",
193      "cost_per_1m_in": 0.65,
194      "cost_per_1m_out": 3,
195      "cost_per_1m_in_cached": 0.65,
196      "cost_per_1m_out_cached": 0.65,
197      "context_window": 262144,
198      "default_max_tokens": 26214,
199      "can_reason": true,
200      "reasoning_levels": [
201        "low",
202        "medium",
203        "high"
204      ],
205      "default_reasoning_effort": "medium",
206      "supports_attachments": false
207    },
208    {
209      "id": "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
210      "name": "Qwen3 Coder 480B A35B Instruct",
211      "cost_per_1m_in": 2,
212      "cost_per_1m_out": 2,
213      "cost_per_1m_in_cached": 2,
214      "cost_per_1m_out_cached": 2,
215      "context_window": 262144,
216      "default_max_tokens": 26214,
217      "can_reason": false,
218      "supports_attachments": false
219    },
220    {
221      "id": "hf:Qwen/Qwen3.5-397B-A17B",
222      "name": "Qwen3.5 397B A17B",
223      "cost_per_1m_in": 0.6,
224      "cost_per_1m_out": 3.6,
225      "cost_per_1m_in_cached": 0.6,
226      "cost_per_1m_out_cached": 0.6,
227      "context_window": 262144,
228      "default_max_tokens": 26214,
229      "can_reason": false,
230      "supports_attachments": true
231    },
232    {
233      "id": "hf:openai/gpt-oss-120b",
234      "name": "gpt oss 120b",
235      "cost_per_1m_in": 0.1,
236      "cost_per_1m_out": 0.1,
237      "cost_per_1m_in_cached": 0.1,
238      "cost_per_1m_out_cached": 0.1,
239      "context_window": 131072,
240      "default_max_tokens": 13107,
241      "can_reason": true,
242      "reasoning_levels": [
243        "low",
244        "medium",
245        "high"
246      ],
247      "default_reasoning_effort": "medium",
248      "supports_attachments": false
249    }
250  ]
251}