1{
2 "name": "io.net",
3 "id": "ionet",
4 "api_key": "$IONET_API_KEY",
5 "api_endpoint": "https://api.intelligence.io.solutions/api/v1",
6 "type": "openai-compat",
7 "default_large_model_id": "moonshotai/Kimi-K2.5",
8 "default_small_model_id": "zai-org/GLM-4.7-Flash",
9 "models": [
10 {
11 "id": "deepseek-ai/DeepSeek-V3.2",
12 "name": "DeepSeek: DeepSeek V3.2",
13 "cost_per_1m_in": 0.25,
14 "cost_per_1m_out": 0.38,
15 "cost_per_1m_in_cached": 0.125,
16 "cost_per_1m_out_cached": 0.5,
17 "context_window": 163840,
18 "default_max_tokens": 16384,
19 "can_reason": true,
20 "supports_attachments": false
21 },
22 {
23 "id": "deepseek-ai/DeepSeek-V4-Flash",
24 "name": "DeepSeek: DeepSeek V4 Flash",
25 "cost_per_1m_in": 1.55,
26 "cost_per_1m_out": 2.28,
27 "cost_per_1m_in_cached": 0.38,
28 "cost_per_1m_out_cached": 0,
29 "context_window": 65000,
30 "default_max_tokens": 6500,
31 "can_reason": true,
32 "supports_attachments": false
33 },
34 {
35 "id": "deepseek-ai/DeepSeek-V4-Pro",
36 "name": "DeepSeek: DeepSeek V4 Pro",
37 "cost_per_1m_in": 4.45,
38 "cost_per_1m_out": 5.5,
39 "cost_per_1m_in_cached": 0.35,
40 "cost_per_1m_out_cached": 0,
41 "context_window": 600000,
42 "default_max_tokens": 60000,
43 "can_reason": true,
44 "supports_attachments": false
45 },
46 {
47 "id": "google/gemma-4-26b-a4b-it",
48 "name": "Google: Gemma 4 26B A4B",
49 "cost_per_1m_in": 0.145,
50 "cost_per_1m_out": 0.5,
51 "cost_per_1m_in_cached": 0.08,
52 "cost_per_1m_out_cached": 0,
53 "context_window": 262144,
54 "default_max_tokens": 26214,
55 "can_reason": true,
56 "supports_attachments": false
57 },
58 {
59 "id": "Intel/Qwen3-Coder-480B-A35B-Instruct-int4-mixed-ar",
60 "name": "Intel: Qwen3 Coder 480B A35B Instruct INT4 Mixed AR",
61 "cost_per_1m_in": 0.22,
62 "cost_per_1m_out": 0.95,
63 "cost_per_1m_in_cached": 0.11,
64 "cost_per_1m_out_cached": 0.44,
65 "context_window": 106000,
66 "default_max_tokens": 10600,
67 "can_reason": false,
68 "supports_attachments": false
69 },
70 {
71 "id": "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
72 "name": "Meta-Llama: Llama 4 Maverick 17B 128E Instruct FP8",
73 "cost_per_1m_in": 0.15,
74 "cost_per_1m_out": 0.6,
75 "cost_per_1m_in_cached": 0.075,
76 "cost_per_1m_out_cached": 0.3,
77 "context_window": 430000,
78 "default_max_tokens": 43000,
79 "can_reason": true,
80 "supports_attachments": true
81 },
82 {
83 "id": "meta-llama/Llama-3.3-70B-Instruct",
84 "name": "Meta: Llama 3.3 70B Instruct",
85 "cost_per_1m_in": 0.1,
86 "cost_per_1m_out": 0.32,
87 "cost_per_1m_in_cached": 0.05,
88 "cost_per_1m_out_cached": 0.2,
89 "context_window": 128000,
90 "default_max_tokens": 12800,
91 "can_reason": true,
92 "supports_attachments": false
93 },
94 {
95 "id": "MiniMaxAI/MiniMax-M2.5",
96 "name": "MiniMaxAI: MiniMax M2.5",
97 "cost_per_1m_in": 0.2,
98 "cost_per_1m_out": 1.2,
99 "cost_per_1m_in_cached": 0.1,
100 "cost_per_1m_out_cached": 0,
101 "context_window": 196600,
102 "default_max_tokens": 19660,
103 "can_reason": false,
104 "supports_attachments": false
105 },
106 {
107 "id": "mistralai/Mistral-Large-Instruct-2411",
108 "name": "Mistral: Mistral Large Instruct 2411",
109 "cost_per_1m_in": 2,
110 "cost_per_1m_out": 6,
111 "cost_per_1m_in_cached": 1,
112 "cost_per_1m_out_cached": 4,
113 "context_window": 128000,
114 "default_max_tokens": 12800,
115 "can_reason": false,
116 "supports_attachments": true
117 },
118 {
119 "id": "moonshotai/Kimi-K2-Instruct-0905",
120 "name": "MoonshotAI: Kimi K2 Instruct 0905",
121 "cost_per_1m_in": 0.39,
122 "cost_per_1m_out": 1.9,
123 "cost_per_1m_in_cached": 0.195,
124 "cost_per_1m_out_cached": 0.78,
125 "context_window": 262144,
126 "default_max_tokens": 26214,
127 "can_reason": false,
128 "supports_attachments": false
129 },
130 {
131 "id": "moonshotai/Kimi-K2-Thinking",
132 "name": "MoonshotAI: Kimi K2 Thinking",
133 "cost_per_1m_in": 0.32,
134 "cost_per_1m_out": 0.48,
135 "cost_per_1m_in_cached": 0.16,
136 "cost_per_1m_out_cached": 0.64,
137 "context_window": 262144,
138 "default_max_tokens": 26214,
139 "can_reason": true,
140 "supports_attachments": false
141 },
142 {
143 "id": "moonshotai/Kimi-K2.5",
144 "name": "MoonshotAI: Kimi K2.5",
145 "cost_per_1m_in": 0.445,
146 "cost_per_1m_out": 2,
147 "cost_per_1m_in_cached": 0.225,
148 "cost_per_1m_out_cached": 1.1,
149 "context_window": 262144,
150 "default_max_tokens": 26214,
151 "can_reason": true,
152 "supports_attachments": true
153 },
154 {
155 "id": "moonshotai/Kimi-K2.6",
156 "name": "MoonshotAI: Kimi K2.6",
157 "cost_per_1m_in": 0.74,
158 "cost_per_1m_out": 3.49,
159 "cost_per_1m_in_cached": 0.14,
160 "cost_per_1m_out_cached": 0,
161 "context_window": 262144,
162 "default_max_tokens": 26214,
163 "can_reason": true,
164 "supports_attachments": true
165 },
166 {
167 "id": "openai/gpt-oss-120b",
168 "name": "OpenAI: gpt-oss-120b",
169 "cost_per_1m_in": 0.1,
170 "cost_per_1m_out": 0.4,
171 "cost_per_1m_in_cached": 0.01,
172 "cost_per_1m_out_cached": 0.04,
173 "context_window": 131072,
174 "default_max_tokens": 13107,
175 "can_reason": true,
176 "reasoning_levels": [
177 "low",
178 "medium",
179 "high"
180 ],
181 "default_reasoning_effort": "medium",
182 "supports_attachments": false
183 },
184 {
185 "id": "openai/gpt-oss-20b",
186 "name": "OpenAI: gpt-oss-20b",
187 "cost_per_1m_in": 0.016,
188 "cost_per_1m_out": 0.06,
189 "cost_per_1m_in_cached": 0.008,
190 "cost_per_1m_out_cached": 0.032,
191 "context_window": 64000,
192 "default_max_tokens": 6400,
193 "can_reason": true,
194 "reasoning_levels": [
195 "low",
196 "medium",
197 "high"
198 ],
199 "default_reasoning_effort": "medium",
200 "supports_attachments": false
201 },
202 {
203 "id": "Qwen/Qwen2.5-VL-32B-Instruct",
204 "name": "Qwen: Qwen2.5 VL 32B Instruct",
205 "cost_per_1m_in": 0.05,
206 "cost_per_1m_out": 0.22,
207 "cost_per_1m_in_cached": 0.025,
208 "cost_per_1m_out_cached": 0.1,
209 "context_window": 32000,
210 "default_max_tokens": 3200,
211 "can_reason": false,
212 "supports_attachments": true
213 },
214 {
215 "id": "Qwen/Qwen3-Next-80B-A3B-Instruct",
216 "name": "Qwen: Qwen3 Next 80B A3B Instruct",
217 "cost_per_1m_in": 0.06,
218 "cost_per_1m_out": 0.6,
219 "cost_per_1m_in_cached": 0.03,
220 "cost_per_1m_out_cached": 0.12,
221 "context_window": 262144,
222 "default_max_tokens": 26214,
223 "can_reason": false,
224 "supports_attachments": false
225 },
226 {
227 "id": "zai-org/GLM-4.6",
228 "name": "Z.ai: GLM 4.6",
229 "cost_per_1m_in": 0.35,
230 "cost_per_1m_out": 1.5,
231 "cost_per_1m_in_cached": 0.175,
232 "cost_per_1m_out_cached": 0.7,
233 "context_window": 200000,
234 "default_max_tokens": 20000,
235 "can_reason": true,
236 "supports_attachments": false
237 },
238 {
239 "id": "zai-org/GLM-4.7",
240 "name": "Z.ai: GLM 4.7",
241 "cost_per_1m_in": 0.5,
242 "cost_per_1m_out": 2,
243 "cost_per_1m_in_cached": 0.35,
244 "cost_per_1m_out_cached": 0.6,
245 "context_window": 202752,
246 "default_max_tokens": 20275,
247 "can_reason": true,
248 "supports_attachments": false
249 },
250 {
251 "id": "zai-org/GLM-4.7-Flash",
252 "name": "Z.ai: GLM 4.7 Flash",
253 "cost_per_1m_in": 0.07,
254 "cost_per_1m_out": 0.4,
255 "cost_per_1m_in_cached": 0.035,
256 "cost_per_1m_out_cached": 0.14,
257 "context_window": 200000,
258 "default_max_tokens": 20000,
259 "can_reason": true,
260 "supports_attachments": false
261 },
262 {
263 "id": "zai-org/GLM-5",
264 "name": "Z.ai: GLM 5",
265 "cost_per_1m_in": 1,
266 "cost_per_1m_out": 3,
267 "cost_per_1m_in_cached": 0.5,
268 "cost_per_1m_out_cached": 0,
269 "context_window": 202752,
270 "default_max_tokens": 20275,
271 "can_reason": true,
272 "supports_attachments": false
273 },
274 {
275 "id": "zai-org/GLM-5.1",
276 "name": "Z.ai: GLM 5.1",
277 "cost_per_1m_in": 1.5,
278 "cost_per_1m_out": 4.4,
279 "cost_per_1m_in_cached": 0.26,
280 "cost_per_1m_out_cached": 0,
281 "context_window": 202752,
282 "default_max_tokens": 20275,
283 "can_reason": true,
284 "supports_attachments": false
285 }
286 ]
287}