{
  "name": "OpenRouter",
  "id": "openrouter",
  "api_key": "$OPENROUTER_API_KEY",
  "api_endpoint": "https://openrouter.ai/api/v1",
  "type": "openai",
  "default_large_model_id": "anthropic/claude-sonnet-4",
  "default_small_model_id": "anthropic/claude-3.5-haiku",
  "models": [
    {
      "id": "ai21/jamba-large-1.7",
      "name": "AI21: Jamba Large 1.7",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "ai21/jamba-mini-1.7",
      "name": "AI21: Jamba Mini 1.7",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "amazon/nova-lite-v1",
      "name": "Amazon: Nova Lite 1.0",
      "cost_per_1m_in": 0.06,
      "cost_per_1m_out": 0.24,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "amazon/nova-micro-v1",
      "name": "Amazon: Nova Micro 1.0",
      "cost_per_1m_in": 0.035,
      "cost_per_1m_out": 0.14,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "amazon/nova-pro-v1",
      "name": "Amazon: Nova Pro 1.0",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 3.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 2560,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openrouter/andromeda-alpha",
      "name": "Andromeda Alpha",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-3-haiku",
      "name": "Anthropic: Claude 3 Haiku",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1.25,
      "cost_per_1m_in_cached": 0.3,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-3-opus",
      "name": "Anthropic: Claude 3 Opus",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-3.5-haiku",
      "name": "Anthropic: Claude 3.5 Haiku",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-3.5-haiku-20241022",
      "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 0.08,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-3.5-sonnet",
      "name": "Anthropic: Claude 3.5 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-3.5-sonnet-20240620",
      "name": "Anthropic: Claude 3.5 Sonnet (2024-06-20)",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 4096,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-3.7-sonnet",
      "name": "Anthropic: Claude 3.7 Sonnet",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-3.7-sonnet:thinking",
      "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-haiku-4.5",
      "name": "Anthropic: Claude Haiku 4.5",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 1.25,
      "cost_per_1m_out_cached": 0.1,
      "context_window": 200000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-opus-4",
      "name": "Anthropic: Claude Opus 4",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-opus-4.1",
      "name": "Anthropic: Claude Opus 4.1",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 75,
      "cost_per_1m_in_cached": 18.75,
      "cost_per_1m_out_cached": 1.5,
      "context_window": 200000,
      "default_max_tokens": 16000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-sonnet-4",
      "name": "Anthropic: Claude Sonnet 4",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "anthropic/claude-sonnet-4.5",
      "name": "Anthropic: Claude Sonnet 4.5",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 3.75,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 1000000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "arcee-ai/virtuoso-large",
      "name": "Arcee AI: Virtuoso Large",
      "cost_per_1m_in": 0.75,
      "cost_per_1m_out": 1.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 32000,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "baidu/ernie-4.5-21b-a3b",
      "name": "Baidu: ERNIE 4.5 21B A3B",
      "cost_per_1m_in": 0.07,
      "cost_per_1m_out": 0.28,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 120000,
      "default_max_tokens": 4000,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "baidu/ernie-4.5-vl-28b-a3b",
      "name": "Baidu: ERNIE 4.5 VL 28B A3B",
      "cost_per_1m_in": 0.14,
      "cost_per_1m_out": 0.56,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 30000,
      "default_max_tokens": 4000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
      "name": "Cogito V2 Preview Llama 109B",
      "cost_per_1m_in": 0.18,
      "cost_per_1m_out": 0.59,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32767,
      "default_max_tokens": 3276,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "cohere/command-r-08-2024",
      "name": "Cohere: Command R (08-2024)",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "cohere/command-r-plus-08-2024",
      "name": "Cohere: Command R+ (08-2024)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2000,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-405b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 405B",
      "cost_per_1m_in": 3.5,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "deepcogito/cogito-v2-preview-llama-70b",
      "name": "Deep Cogito: Cogito V2 Preview Llama 70B",
      "cost_per_1m_in": 0.88,
      "cost_per_1m_out": 0.88,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "deepseek/deepseek-chat",
      "name": "DeepSeek: DeepSeek V3",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 1.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 64000,
      "default_max_tokens": 8000,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "deepseek/deepseek-chat-v3-0324",
      "name": "DeepSeek: DeepSeek V3 0324",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1.12,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.135,
      "context_window": 163840,
      "default_max_tokens": 81920,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "deepseek/deepseek-chat-v3-0324:free",
      "name": "DeepSeek: DeepSeek V3 0324 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "deepseek/deepseek-chat-v3.1",
      "name": "DeepSeek: DeepSeek V3.1",
      "cost_per_1m_in": 0.56,
      "cost_per_1m_out": 1.68,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 10240,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus",
      "name": "DeepSeek: DeepSeek V3.1 Terminus",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "deepseek/deepseek-v3.1-terminus:exacto",
      "name": "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "deepseek/deepseek-v3.2-exp",
      "name": "DeepSeek: DeepSeek V3.2 Exp",
      "cost_per_1m_in": 0.27,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "deepseek/deepseek-r1",
      "name": "DeepSeek: R1",
      "cost_per_1m_in": 0.7,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 64000,
      "default_max_tokens": 8000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "deepseek/deepseek-r1-0528",
      "name": "DeepSeek: R1 0528",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 2.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "google/gemini-2.0-flash-001",
      "name": "Google: Gemini 2.0 Flash",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "google/gemini-2.0-flash-exp:free",
      "name": "Google: Gemini 2.0 Flash Experimental (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "google/gemini-2.0-flash-lite-001",
      "name": "Google: Gemini 2.0 Flash Lite",
      "cost_per_1m_in": 0.075,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 4096,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "google/gemini-2.5-flash",
      "name": "Google: Gemini 2.5 Flash",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.03,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "google/gemini-2.5-flash-lite",
      "name": "Google: Gemini 2.5 Flash Lite",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.01,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "google/gemini-2.5-flash-lite-preview-06-17",
      "name": "Google: Gemini 2.5 Flash Lite Preview 06-17",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.1833,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "google/gemini-2.5-flash-lite-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "google/gemini-2.5-flash-preview-09-2025",
      "name": "Google: Gemini 2.5 Flash Preview 09-2025",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0.3833,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 1048576,
      "default_max_tokens": 32767,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "google/gemini-2.5-pro",
      "name": "Google: Gemini 2.5 Pro",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "google/gemini-2.5-pro-preview-05-06",
      "name": "Google: Gemini 2.5 Pro Preview 05-06",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "google/gemini-2.5-pro-preview",
      "name": "Google: Gemini 2.5 Pro Preview 06-05",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 1.625,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 1048576,
      "default_max_tokens": 32768,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "inception/mercury",
      "name": "Inception: Mercury",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "inception/mercury-coder",
      "name": "Inception: Mercury Coder",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meituan/longcat-flash-chat:free",
      "name": "Meituan: LongCat Flash Chat (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meta-llama/llama-3-70b-instruct",
      "name": "Meta: Llama 3 70B Instruct",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 8192,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meta-llama/llama-3-8b-instruct",
      "name": "Meta: Llama 3 8B Instruct",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.06,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 8192,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meta-llama/llama-3.1-405b-instruct",
      "name": "Meta: Llama 3.1 405B Instruct",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meta-llama/llama-3.1-70b-instruct",
      "name": "Meta: Llama 3.1 70B Instruct",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meta-llama/llama-3.1-8b-instruct",
      "name": "Meta: Llama 3.1 8B Instruct",
      "cost_per_1m_in": 0.02,
      "cost_per_1m_out": 0.03,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meta-llama/llama-3.2-3b-instruct",
      "name": "Meta: Llama 3.2 3B Instruct",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.05,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16000,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct",
      "name": "Meta: Llama 3.3 70B Instruct",
      "cost_per_1m_in": 0.6,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meta-llama/llama-3.3-70b-instruct:free",
      "name": "Meta: Llama 3.3 70B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2014,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meta-llama/llama-3.3-8b-instruct:free",
      "name": "Meta: Llama 3.3 8B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2014,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "meta-llama/llama-4-maverick",
      "name": "Meta: Llama 4 Maverick",
      "cost_per_1m_in": 0.22,
      "cost_per_1m_out": 0.88,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 65536,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "meta-llama/llama-4-maverick:free",
      "name": "Meta: Llama 4 Maverick (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2014,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "meta-llama/llama-4-scout",
      "name": "Meta: Llama 4 Scout",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "meta-llama/llama-4-scout:free",
      "name": "Meta: Llama 4 Scout (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2014,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "microsoft/phi-3-medium-128k-instruct",
      "name": "Microsoft: Phi-3 Medium 128K Instruct",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "microsoft/phi-3-mini-128k-instruct",
      "name": "Microsoft: Phi-3 Mini 128K Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "microsoft/phi-3.5-mini-128k-instruct",
      "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "minimax/minimax-m2:free",
      "name": "MiniMax: MiniMax M2 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 204800,
      "default_max_tokens": 65536,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-large",
      "name": "Mistral Large",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-large-2407",
      "name": "Mistral Large 2407",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-large-2411",
      "name": "Mistral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-small",
      "name": "Mistral Small",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-tiny",
      "name": "Mistral Tiny",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.25,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/codestral-2501",
      "name": "Mistral: Codestral 2501",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/codestral-2508",
      "name": "Mistral: Codestral 2508",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/devstral-medium",
      "name": "Mistral: Devstral Medium",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/devstral-small-2505",
      "name": "Mistral: Devstral Small 2505",
      "cost_per_1m_in": 0.05,
      "cost_per_1m_out": 0.22,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/devstral-small-2505:free",
      "name": "Mistral: Devstral Small 2505 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/magistral-medium-2506",
      "name": "Mistral: Magistral Medium 2506",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "mistralai/magistral-medium-2506:thinking",
      "name": "Mistral: Magistral Medium 2506 (thinking)",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40960,
      "default_max_tokens": 20000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "mistralai/magistral-small-2506",
      "name": "Mistral: Magistral Small 2506",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40000,
      "default_max_tokens": 20000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "mistralai/ministral-8b",
      "name": "Mistral: Ministral 8B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-7b-instruct",
      "name": "Mistral: Mistral 7B Instruct",
      "cost_per_1m_in": 0.028,
      "cost_per_1m_out": 0.054,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-7b-instruct:free",
      "name": "Mistral: Mistral 7B Instruct (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-7b-instruct-v0.3",
      "name": "Mistral: Mistral 7B Instruct v0.3",
      "cost_per_1m_in": 0.028,
      "cost_per_1m_out": 0.054,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-medium-3",
      "name": "Mistral: Mistral Medium 3",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "mistralai/mistral-medium-3.1",
      "name": "Mistral: Mistral Medium 3.1",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "mistralai/mistral-nemo",
      "name": "Mistral: Mistral Nemo",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-small-24b-instruct-2501",
      "name": "Mistral: Mistral Small 3",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct",
      "name": "Mistral: Mistral Small 3.1 24B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "mistralai/mistral-small-3.1-24b-instruct:free",
      "name": "Mistral: Mistral Small 3.1 24B (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 96000,
      "default_max_tokens": 48000,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "mistralai/mistral-small-3.2-24b-instruct",
      "name": "Mistral: Mistral Small 3.2 24B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "mistralai/mistral-small-3.2-24b-instruct:free",
      "name": "Mistral: Mistral Small 3.2 24B (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "mistralai/mixtral-8x22b-instruct",
      "name": "Mistral: Mixtral 8x22B Instruct",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 65536,
      "default_max_tokens": 6553,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/mixtral-8x7b-instruct",
      "name": "Mistral: Mixtral 8x7B Instruct",
      "cost_per_1m_in": 0.54,
      "cost_per_1m_out": 0.54,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "mistralai/pixtral-12b",
      "name": "Mistral: Pixtral 12B",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "mistralai/pixtral-large-2411",
      "name": "Mistral: Pixtral Large 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "mistralai/mistral-saba",
      "name": "Mistral: Saba",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "moonshotai/kimi-k2",
      "name": "MoonshotAI: Kimi K2 0711",
      "cost_per_1m_in": 0.6,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "moonshotai/kimi-k2-0905",
      "name": "MoonshotAI: Kimi K2 0905",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "moonshotai/kimi-k2-0905:exacto",
      "name": "MoonshotAI: Kimi K2 0905 (exacto)",
      "cost_per_1m_in": 0.6,
      "cost_per_1m_out": 2.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "nvidia/llama-3.1-nemotron-70b-instruct",
      "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
      "cost_per_1m_in": 0.6,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
      "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2",
      "name": "NVIDIA: Nemotron Nano 9B V2",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.16,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "nvidia/nemotron-nano-9b-v2:free",
      "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
      "cost_per_1m_in": 0,
      "cost_per_1m_out": 0,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "nousresearch/deephermes-3-llama-3-8b-preview",
      "name": "Nous: DeepHermes 3 Llama 3 8B Preview",
      "cost_per_1m_in": 0.03,
      "cost_per_1m_out": 0.11,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "nousresearch/deephermes-3-mistral-24b-preview",
      "name": "Nous: DeepHermes 3 Mistral 24B Preview",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.59,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 16384,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
    {
      "id": "nousresearch/hermes-3-llama-3.1-70b",
      "name": "Nous: Hermes 3 70B Instruct",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 12288,
      "default_max_tokens": 1228,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "openai/codex-mini",
      "name": "OpenAI: Codex Mini",
      "cost_per_1m_in": 1.5,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.375,
      "context_window": 200000,
      "default_max_tokens": 50000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-3.5-turbo",
      "name": "OpenAI: GPT-3.5 Turbo",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 16385,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "openai/gpt-3.5-turbo-0613",
      "name": "OpenAI: GPT-3.5 Turbo (older v0613)",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 4095,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "openai/gpt-3.5-turbo-16k",
      "name": "OpenAI: GPT-3.5 Turbo 16k",
      "cost_per_1m_in": 3,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 16385,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "openai/gpt-4",
      "name": "OpenAI: GPT-4",
      "cost_per_1m_in": 30,
      "cost_per_1m_out": 60,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 8191,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "openai/gpt-4-0314",
      "name": "OpenAI: GPT-4 (older v0314)",
      "cost_per_1m_in": 30,
      "cost_per_1m_out": 60,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 8191,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "openai/gpt-4-turbo",
      "name": "OpenAI: GPT-4 Turbo",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-4-1106-preview",
      "name": "OpenAI: GPT-4 Turbo (older v1106)",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "openai/gpt-4-turbo-preview",
      "name": "OpenAI: GPT-4 Turbo Preview",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "openai/gpt-4.1",
      "name": "OpenAI: GPT-4.1",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.5,
      "context_window": 1047576,
      "default_max_tokens": 16384,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-4.1-mini",
      "name": "OpenAI: GPT-4.1 Mini",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 1.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.1,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-4.1-nano",
      "name": "OpenAI: GPT-4.1 Nano",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 1047576,
      "default_max_tokens": 16384,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-4o",
      "name": "OpenAI: GPT-4o",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-4o-2024-05-13",
      "name": "OpenAI: GPT-4o (2024-05-13)",
      "cost_per_1m_in": 5,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 2048,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-4o-2024-08-06",
      "name": "OpenAI: GPT-4o (2024-08-06)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 1.25,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-4o-2024-11-20",
      "name": "OpenAI: GPT-4o (2024-11-20)",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 1.25,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-4o:extended",
      "name": "OpenAI: GPT-4o (extended)",
      "cost_per_1m_in": 6,
      "cost_per_1m_out": 18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-4o-audio-preview",
      "name": "OpenAI: GPT-4o Audio",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": false
    },
    {
      "id": "openai/gpt-4o-mini",
      "name": "OpenAI: GPT-4o-mini",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-4o-mini-2024-07-18",
      "name": "OpenAI: GPT-4o-mini (2024-07-18)",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.075,
      "context_window": 128000,
      "default_max_tokens": 8192,
      "can_reason": false,
      "has_reasoning_efforts": false,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-5",
      "name": "OpenAI: GPT-5",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-5-codex",
      "name": "OpenAI: GPT-5 Codex",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.125,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-5-image",
      "name": "OpenAI: GPT-5 Image",
      "cost_per_1m_in": 10,
      "cost_per_1m_out": 10,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 1.25,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-5-image-mini",
      "name": "OpenAI: GPT-5 Image Mini",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.25,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-5-mini",
      "name": "OpenAI: GPT-5 Mini",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.025,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-5-nano",
      "name": "OpenAI: GPT-5 Nano",
      "cost_per_1m_in": 0.05,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0.01,
      "context_window": 400000,
      "default_max_tokens": 40000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-5-pro",
      "name": "OpenAI: GPT-5 Pro",
      "cost_per_1m_in": 15,
      "cost_per_1m_out": 120,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 400000,
      "default_max_tokens": 64000,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": true
    },
    {
      "id": "openai/gpt-oss-120b",
      "name": "OpenAI: gpt-oss-120b",
      "cost_per_1m_in": 0.04,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 65536,
      "can_reason": true,
      "has_reasoning_efforts": true,
      "supports_attachments": false
    },
1765 {
1766 "id": "openai/gpt-oss-120b:exacto",
1767 "name": "OpenAI: gpt-oss-120b (exacto)",
1768 "cost_per_1m_in": 0.049999999999999996,
1769 "cost_per_1m_out": 0.24,
1770 "cost_per_1m_in_cached": 0,
1771 "cost_per_1m_out_cached": 0,
1772 "context_window": 131072,
1773 "default_max_tokens": 13107,
1774 "can_reason": true,
1775 "has_reasoning_efforts": true,
1776 "supports_attachments": false
1777 },
1778 {
1779 "id": "openai/gpt-oss-20b",
1780 "name": "OpenAI: gpt-oss-20b",
1781 "cost_per_1m_in": 0.04,
1782 "cost_per_1m_out": 0.15,
1783 "cost_per_1m_in_cached": 0,
1784 "cost_per_1m_out_cached": 0,
1785 "context_window": 131072,
1786 "default_max_tokens": 13107,
1787 "can_reason": true,
1788 "has_reasoning_efforts": true,
1789 "supports_attachments": false
1790 },
1791 {
1792 "id": "openai/gpt-oss-20b:free",
1793 "name": "OpenAI: gpt-oss-20b (free)",
1794 "cost_per_1m_in": 0,
1795 "cost_per_1m_out": 0,
1796 "cost_per_1m_in_cached": 0,
1797 "cost_per_1m_out_cached": 0,
1798 "context_window": 131072,
1799 "default_max_tokens": 65536,
1800 "can_reason": true,
1801 "has_reasoning_efforts": true,
1802 "supports_attachments": false
1803 },
1804 {
1805 "id": "openai/o1",
1806 "name": "OpenAI: o1",
1807 "cost_per_1m_in": 15,
1808 "cost_per_1m_out": 60,
1809 "cost_per_1m_in_cached": 0,
1810 "cost_per_1m_out_cached": 7.5,
1811 "context_window": 200000,
1812 "default_max_tokens": 50000,
1813 "can_reason": false,
1814 "has_reasoning_efforts": false,
1815 "supports_attachments": true
1816 },
1817 {
1818 "id": "openai/o3",
1819 "name": "OpenAI: o3",
1820 "cost_per_1m_in": 2,
1821 "cost_per_1m_out": 8,
1822 "cost_per_1m_in_cached": 0,
1823 "cost_per_1m_out_cached": 0.5,
1824 "context_window": 200000,
1825 "default_max_tokens": 50000,
1826 "can_reason": true,
1827 "has_reasoning_efforts": true,
1828 "supports_attachments": true
1829 },
1830 {
1831 "id": "openai/o3-deep-research",
1832 "name": "OpenAI: o3 Deep Research",
1833 "cost_per_1m_in": 10,
1834 "cost_per_1m_out": 40,
1835 "cost_per_1m_in_cached": 0,
1836 "cost_per_1m_out_cached": 2.5,
1837 "context_window": 200000,
1838 "default_max_tokens": 50000,
1839 "can_reason": true,
1840 "has_reasoning_efforts": true,
1841 "supports_attachments": true
1842 },
1843 {
1844 "id": "openai/o3-mini",
1845 "name": "OpenAI: o3 Mini",
1846 "cost_per_1m_in": 1.1,
1847 "cost_per_1m_out": 4.4,
1848 "cost_per_1m_in_cached": 0,
1849 "cost_per_1m_out_cached": 0.55,
1850 "context_window": 200000,
1851 "default_max_tokens": 50000,
1852 "can_reason": false,
1853 "has_reasoning_efforts": false,
1854 "supports_attachments": false
1855 },
1856 {
1857 "id": "openai/o3-mini-high",
1858 "name": "OpenAI: o3 Mini High",
1859 "cost_per_1m_in": 1.1,
1860 "cost_per_1m_out": 4.4,
1861 "cost_per_1m_in_cached": 0,
1862 "cost_per_1m_out_cached": 0.55,
1863 "context_window": 200000,
1864 "default_max_tokens": 50000,
1865 "can_reason": false,
1866 "has_reasoning_efforts": false,
1867 "supports_attachments": false
1868 },
1869 {
1870 "id": "openai/o3-pro",
1871 "name": "OpenAI: o3 Pro",
1872 "cost_per_1m_in": 20,
1873 "cost_per_1m_out": 80,
1874 "cost_per_1m_in_cached": 0,
1875 "cost_per_1m_out_cached": 0,
1876 "context_window": 200000,
1877 "default_max_tokens": 50000,
1878 "can_reason": true,
1879 "has_reasoning_efforts": true,
1880 "supports_attachments": true
1881 },
1882 {
1883 "id": "openai/o4-mini",
1884 "name": "OpenAI: o4 Mini",
1885 "cost_per_1m_in": 1.1,
1886 "cost_per_1m_out": 4.4,
1887 "cost_per_1m_in_cached": 0,
1888 "cost_per_1m_out_cached": 0.275,
1889 "context_window": 200000,
1890 "default_max_tokens": 50000,
1891 "can_reason": true,
1892 "has_reasoning_efforts": true,
1893 "supports_attachments": true
1894 },
1895 {
1896 "id": "openai/o4-mini-deep-research",
1897 "name": "OpenAI: o4 Mini Deep Research",
1898 "cost_per_1m_in": 2,
1899 "cost_per_1m_out": 8,
1900 "cost_per_1m_in_cached": 0,
1901 "cost_per_1m_out_cached": 0.5,
1902 "context_window": 200000,
1903 "default_max_tokens": 50000,
1904 "can_reason": true,
1905 "has_reasoning_efforts": true,
1906 "supports_attachments": true
1907 },
1908 {
1909 "id": "openai/o4-mini-high",
1910 "name": "OpenAI: o4 Mini High",
1911 "cost_per_1m_in": 1.1,
1912 "cost_per_1m_out": 4.4,
1913 "cost_per_1m_in_cached": 0,
1914 "cost_per_1m_out_cached": 0.275,
1915 "context_window": 200000,
1916 "default_max_tokens": 50000,
1917 "can_reason": true,
1918 "has_reasoning_efforts": true,
1919 "supports_attachments": true
1920 },
1921 {
1922 "id": "qwen/qwen-2.5-72b-instruct",
1923 "name": "Qwen2.5 72B Instruct",
1924 "cost_per_1m_in": 0.13,
1925 "cost_per_1m_out": 0.4,
1926 "cost_per_1m_in_cached": 0,
1927 "cost_per_1m_out_cached": 0,
1928 "context_window": 131072,
1929 "default_max_tokens": 13107,
1930 "can_reason": false,
1931 "has_reasoning_efforts": false,
1932 "supports_attachments": false
1933 },
1934 {
1935 "id": "qwen/qwq-32b",
1936 "name": "Qwen: QwQ 32B",
1937 "cost_per_1m_in": 0.15,
1938 "cost_per_1m_out": 0.58,
1939 "cost_per_1m_in_cached": 0,
1940 "cost_per_1m_out_cached": 0,
1941 "context_window": 131072,
1942 "default_max_tokens": 65536,
1943 "can_reason": true,
1944 "has_reasoning_efforts": true,
1945 "supports_attachments": false
1946 },
1947 {
1948 "id": "qwen/qwen-plus-2025-07-28",
1949 "name": "Qwen: Qwen Plus 0728",
1950 "cost_per_1m_in": 0.4,
1951 "cost_per_1m_out": 1.2,
1952 "cost_per_1m_in_cached": 0,
1953 "cost_per_1m_out_cached": 0,
1954 "context_window": 1000000,
1955 "default_max_tokens": 16384,
1956 "can_reason": false,
1957 "has_reasoning_efforts": false,
1958 "supports_attachments": false
1959 },
1960 {
1961 "id": "qwen/qwen-plus-2025-07-28:thinking",
1962 "name": "Qwen: Qwen Plus 0728 (thinking)",
1963 "cost_per_1m_in": 0.4,
1964 "cost_per_1m_out": 4,
1965 "cost_per_1m_in_cached": 0,
1966 "cost_per_1m_out_cached": 0,
1967 "context_window": 1000000,
1968 "default_max_tokens": 16384,
1969 "can_reason": true,
1970 "has_reasoning_efforts": true,
1971 "supports_attachments": false
1972 },
1973 {
1974 "id": "qwen/qwen-vl-max",
1975 "name": "Qwen: Qwen VL Max",
1976 "cost_per_1m_in": 0.8,
1977 "cost_per_1m_out": 3.2,
1978 "cost_per_1m_in_cached": 0,
1979 "cost_per_1m_out_cached": 0,
1980 "context_window": 131072,
1981 "default_max_tokens": 4096,
1982 "can_reason": false,
1983 "has_reasoning_efforts": false,
1984 "supports_attachments": true
1985 },
1986 {
1987 "id": "qwen/qwen-max",
1988 "name": "Qwen: Qwen-Max",
1989 "cost_per_1m_in": 1.6,
1990 "cost_per_1m_out": 6.4,
1991 "cost_per_1m_in_cached": 0,
1992 "cost_per_1m_out_cached": 0.64,
1993 "context_window": 32768,
1994 "default_max_tokens": 4096,
1995 "can_reason": false,
1996 "has_reasoning_efforts": false,
1997 "supports_attachments": false
1998 },
1999 {
2000 "id": "qwen/qwen-plus",
2001 "name": "Qwen: Qwen-Plus",
2002 "cost_per_1m_in": 0.4,
2003 "cost_per_1m_out": 1.2,
2004 "cost_per_1m_in_cached": 0,
2005 "cost_per_1m_out_cached": 0.16,
2006 "context_window": 131072,
2007 "default_max_tokens": 4096,
2008 "can_reason": false,
2009 "has_reasoning_efforts": false,
2010 "supports_attachments": false
2011 },
2012 {
2013 "id": "qwen/qwen-turbo",
2014 "name": "Qwen: Qwen-Turbo",
2015 "cost_per_1m_in": 0.05,
2016 "cost_per_1m_out": 0.2,
2017 "cost_per_1m_in_cached": 0,
2018 "cost_per_1m_out_cached": 0.02,
2019 "context_window": 1000000,
2020 "default_max_tokens": 4096,
2021 "can_reason": false,
2022 "has_reasoning_efforts": false,
2023 "supports_attachments": false
2024 },
2025 {
2026 "id": "qwen/qwen3-14b",
2027 "name": "Qwen: Qwen3 14B",
2028 "cost_per_1m_in": 0.06,
2029 "cost_per_1m_out": 0.24,
2030 "cost_per_1m_in_cached": 0,
2031 "cost_per_1m_out_cached": 0,
2032 "context_window": 40960,
2033 "default_max_tokens": 20480,
2034 "can_reason": true,
2035 "has_reasoning_efforts": true,
2036 "supports_attachments": false
2037 },
2038 {
2039 "id": "qwen/qwen3-235b-a22b",
2040 "name": "Qwen: Qwen3 235B A22B",
2041 "cost_per_1m_in": 0.22,
2042 "cost_per_1m_out": 0.88,
2043 "cost_per_1m_in_cached": 0,
2044 "cost_per_1m_out_cached": 0,
2045 "context_window": 131072,
2046 "default_max_tokens": 8192,
2047 "can_reason": true,
2048 "has_reasoning_efforts": true,
2049 "supports_attachments": false
2050 },
2051 {
2052 "id": "qwen/qwen3-235b-a22b-2507",
2053 "name": "Qwen: Qwen3 235B A22B Instruct 2507",
2054 "cost_per_1m_in": 0.35,
2055 "cost_per_1m_out": 1.2,
2056 "cost_per_1m_in_cached": 0,
2057 "cost_per_1m_out_cached": 0,
2058 "context_window": 262144,
2059 "default_max_tokens": 131072,
2060 "can_reason": false,
2061 "has_reasoning_efforts": false,
2062 "supports_attachments": false
2063 },
2064 {
2065 "id": "qwen/qwen3-235b-a22b-thinking-2507",
2066 "name": "Qwen: Qwen3 235B A22B Thinking 2507",
2067 "cost_per_1m_in": 0.11,
2068 "cost_per_1m_out": 0.6,
2069 "cost_per_1m_in_cached": 0,
2070 "cost_per_1m_out_cached": 0,
2071 "context_window": 262144,
2072 "default_max_tokens": 131072,
2073 "can_reason": true,
2074 "has_reasoning_efforts": true,
2075 "supports_attachments": false
2076 },
2077 {
2078 "id": "qwen/qwen3-30b-a3b",
2079 "name": "Qwen: Qwen3 30B A3B",
2080 "cost_per_1m_in": 0.09,
2081 "cost_per_1m_out": 0.45,
2082 "cost_per_1m_in_cached": 0,
2083 "cost_per_1m_out_cached": 0,
2084 "context_window": 131072,
2085 "default_max_tokens": 65536,
2086 "can_reason": true,
2087 "has_reasoning_efforts": true,
2088 "supports_attachments": false
2089 },
2090 {
2091 "id": "qwen/qwen3-30b-a3b-instruct-2507",
2092 "name": "Qwen: Qwen3 30B A3B Instruct 2507",
2093 "cost_per_1m_in": 0.08,
2094 "cost_per_1m_out": 0.33,
2095 "cost_per_1m_in_cached": 0,
2096 "cost_per_1m_out_cached": 0,
2097 "context_window": 262144,
2098 "default_max_tokens": 131072,
2099 "can_reason": false,
2100 "has_reasoning_efforts": false,
2101 "supports_attachments": false
2102 },
2103 {
2104 "id": "qwen/qwen3-30b-a3b-thinking-2507",
2105 "name": "Qwen: Qwen3 30B A3B Thinking 2507",
2106 "cost_per_1m_in": 0.08,
2107 "cost_per_1m_out": 0.29,
2108 "cost_per_1m_in_cached": 0,
2109 "cost_per_1m_out_cached": 0,
2110 "context_window": 262144,
2111 "default_max_tokens": 131072,
2112 "can_reason": true,
2113 "has_reasoning_efforts": true,
2114 "supports_attachments": false
2115 },
2116 {
2117 "id": "qwen/qwen3-32b",
2118 "name": "Qwen: Qwen3 32B",
2119 "cost_per_1m_in": 0.15,
2120 "cost_per_1m_out": 0.5,
2121 "cost_per_1m_in_cached": 0,
2122 "cost_per_1m_out_cached": 0,
2123 "context_window": 131072,
2124 "default_max_tokens": 4000,
2125 "can_reason": true,
2126 "has_reasoning_efforts": true,
2127 "supports_attachments": false
2128 },
2129 {
2130 "id": "qwen/qwen3-4b:free",
2131 "name": "Qwen: Qwen3 4B (free)",
2132 "cost_per_1m_in": 0,
2133 "cost_per_1m_out": 0,
2134 "cost_per_1m_in_cached": 0,
2135 "cost_per_1m_out_cached": 0,
2136 "context_window": 40960,
2137 "default_max_tokens": 4096,
2138 "can_reason": true,
2139 "has_reasoning_efforts": true,
2140 "supports_attachments": false
2141 },
2142 {
2143 "id": "qwen/qwen3-coder-30b-a3b-instruct",
2144 "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
2145 "cost_per_1m_in": 0.07,
2146 "cost_per_1m_out": 0.26,
2147 "cost_per_1m_in_cached": 0,
2148 "cost_per_1m_out_cached": 0,
2149 "context_window": 262144,
2150 "default_max_tokens": 26214,
2151 "can_reason": false,
2152 "has_reasoning_efforts": false,
2153 "supports_attachments": false
2154 },
2155 {
2156 "id": "qwen/qwen3-coder",
2157 "name": "Qwen: Qwen3 Coder 480B A35B",
2158 "cost_per_1m_in": 0.38,
2159 "cost_per_1m_out": 1.53,
2160 "cost_per_1m_in_cached": 0,
2161 "cost_per_1m_out_cached": 0,
2162 "context_window": 262144,
2163 "default_max_tokens": 131072,
2164 "can_reason": true,
2165 "has_reasoning_efforts": true,
2166 "supports_attachments": false
2167 },
2168 {
2169 "id": "qwen/qwen3-coder:exacto",
2170 "name": "Qwen: Qwen3 Coder 480B A35B (exacto)",
2171 "cost_per_1m_in": 0.38,
2172 "cost_per_1m_out": 1.53,
2173 "cost_per_1m_in_cached": 0,
2174 "cost_per_1m_out_cached": 0,
2175 "context_window": 262144,
2176 "default_max_tokens": 131072,
2177 "can_reason": true,
2178 "has_reasoning_efforts": true,
2179 "supports_attachments": false
2180 },
2181 {
2182 "id": "qwen/qwen3-coder:free",
2183 "name": "Qwen: Qwen3 Coder 480B A35B (free)",
2184 "cost_per_1m_in": 0,
2185 "cost_per_1m_out": 0,
2186 "cost_per_1m_in_cached": 0,
2187 "cost_per_1m_out_cached": 0,
2188 "context_window": 262144,
2189 "default_max_tokens": 26214,
2190 "can_reason": false,
2191 "has_reasoning_efforts": false,
2192 "supports_attachments": false
2193 },
2194 {
2195 "id": "qwen/qwen3-coder-flash",
2196 "name": "Qwen: Qwen3 Coder Flash",
2197 "cost_per_1m_in": 0.3,
2198 "cost_per_1m_out": 1.5,
2199 "cost_per_1m_in_cached": 0,
2200 "cost_per_1m_out_cached": 0.08,
2201 "context_window": 128000,
2202 "default_max_tokens": 32768,
2203 "can_reason": false,
2204 "has_reasoning_efforts": false,
2205 "supports_attachments": false
2206 },
2207 {
2208 "id": "qwen/qwen3-coder-plus",
2209 "name": "Qwen: Qwen3 Coder Plus",
2210 "cost_per_1m_in": 1,
2211 "cost_per_1m_out": 5,
2212 "cost_per_1m_in_cached": 0,
2213 "cost_per_1m_out_cached": 0.1,
2214 "context_window": 128000,
2215 "default_max_tokens": 32768,
2216 "can_reason": false,
2217 "has_reasoning_efforts": false,
2218 "supports_attachments": false
2219 },
2220 {
2221 "id": "qwen/qwen3-max",
2222 "name": "Qwen: Qwen3 Max",
2223 "cost_per_1m_in": 1.2,
2224 "cost_per_1m_out": 6,
2225 "cost_per_1m_in_cached": 0,
2226 "cost_per_1m_out_cached": 0.24,
2227 "context_window": 256000,
2228 "default_max_tokens": 16384,
2229 "can_reason": false,
2230 "has_reasoning_efforts": false,
2231 "supports_attachments": false
2232 },
2233 {
2234 "id": "qwen/qwen3-next-80b-a3b-instruct",
2235 "name": "Qwen: Qwen3 Next 80B A3B Instruct",
2236 "cost_per_1m_in": 0.15,
2237 "cost_per_1m_out": 1.5,
2238 "cost_per_1m_in_cached": 0,
2239 "cost_per_1m_out_cached": 0,
2240 "context_window": 262144,
2241 "default_max_tokens": 26214,
2242 "can_reason": false,
2243 "has_reasoning_efforts": false,
2244 "supports_attachments": false
2245 },
2246 {
2247 "id": "qwen/qwen3-next-80b-a3b-thinking",
2248 "name": "Qwen: Qwen3 Next 80B A3B Thinking",
2249 "cost_per_1m_in": 0.3,
2250 "cost_per_1m_out": 0.3,
2251 "cost_per_1m_in_cached": 0,
2252 "cost_per_1m_out_cached": 0,
2253 "context_window": 262144,
2254 "default_max_tokens": 131072,
2255 "can_reason": true,
2256 "has_reasoning_efforts": true,
2257 "supports_attachments": false
2258 },
2259 {
2260 "id": "qwen/qwen3-vl-235b-a22b-instruct",
2261 "name": "Qwen: Qwen3 VL 235B A22B Instruct",
2262 "cost_per_1m_in": 0.5,
2263 "cost_per_1m_out": 2.5,
2264 "cost_per_1m_in_cached": 0,
2265 "cost_per_1m_out_cached": 0,
2266 "context_window": 131072,
2267 "default_max_tokens": 65536,
2268 "can_reason": false,
2269 "has_reasoning_efforts": false,
2270 "supports_attachments": true
2271 },
2272 {
2273 "id": "qwen/qwen3-vl-30b-a3b-thinking",
2274 "name": "Qwen: Qwen3 VL 30B A3B Thinking",
2275 "cost_per_1m_in": 0.2,
2276 "cost_per_1m_out": 1,
2277 "cost_per_1m_in_cached": 0,
2278 "cost_per_1m_out_cached": 0,
2279 "context_window": 131072,
2280 "default_max_tokens": 16384,
2281 "can_reason": true,
2282 "has_reasoning_efforts": true,
2283 "supports_attachments": true
2284 },
2285 {
2286 "id": "qwen/qwen3-vl-8b-instruct",
2287 "name": "Qwen: Qwen3 VL 8B Instruct",
2288 "cost_per_1m_in": 0.08,
2289 "cost_per_1m_out": 0.5,
2290 "cost_per_1m_in_cached": 0,
2291 "cost_per_1m_out_cached": 0,
2292 "context_window": 131072,
2293 "default_max_tokens": 16384,
2294 "can_reason": false,
2295 "has_reasoning_efforts": false,
2296 "supports_attachments": true
2297 },
2298 {
2299 "id": "qwen/qwen3-vl-8b-thinking",
2300 "name": "Qwen: Qwen3 VL 8B Thinking",
2301 "cost_per_1m_in": 0.18,
2302 "cost_per_1m_out": 2.1,
2303 "cost_per_1m_in_cached": 0,
2304 "cost_per_1m_out_cached": 0,
2305 "context_window": 256000,
2306 "default_max_tokens": 16384,
2307 "can_reason": true,
2308 "has_reasoning_efforts": true,
2309 "supports_attachments": true
2310 },
2311 {
2312 "id": "sao10k/l3-euryale-70b",
2313 "name": "Sao10k: Llama 3 Euryale 70B v2.1",
2314 "cost_per_1m_in": 1.48,
2315 "cost_per_1m_out": 1.48,
2316 "cost_per_1m_in_cached": 0,
2317 "cost_per_1m_out_cached": 0,
2318 "context_window": 8192,
2319 "default_max_tokens": 4096,
2320 "can_reason": false,
2321 "has_reasoning_efforts": false,
2322 "supports_attachments": false
2323 },
2324 {
2325 "id": "stepfun-ai/step3",
2326 "name": "StepFun: Step3",
2327 "cost_per_1m_in": 0.57,
2328 "cost_per_1m_out": 1.42,
2329 "cost_per_1m_in_cached": 0,
2330 "cost_per_1m_out_cached": 0,
2331 "context_window": 65536,
2332 "default_max_tokens": 32768,
2333 "can_reason": true,
2334 "has_reasoning_efforts": true,
2335 "supports_attachments": true
2336 },
2337 {
2338 "id": "tngtech/deepseek-r1t2-chimera",
2339 "name": "TNG: DeepSeek R1T2 Chimera",
2340 "cost_per_1m_in": 0.3,
2341 "cost_per_1m_out": 1.2,
2342 "cost_per_1m_in_cached": 0,
2343 "cost_per_1m_out_cached": 0,
2344 "context_window": 163840,
2345 "default_max_tokens": 81920,
2346 "can_reason": true,
2347 "has_reasoning_efforts": true,
2348 "supports_attachments": false
2349 },
2350 {
2351 "id": "thedrummer/rocinante-12b",
2352 "name": "TheDrummer: Rocinante 12B",
2353 "cost_per_1m_in": 0.17,
2354 "cost_per_1m_out": 0.43,
2355 "cost_per_1m_in_cached": 0,
2356 "cost_per_1m_out_cached": 0,
2357 "context_window": 32768,
2358 "default_max_tokens": 3276,
2359 "can_reason": false,
2360 "has_reasoning_efforts": false,
2361 "supports_attachments": false
2362 },
2363 {
2364 "id": "thedrummer/unslopnemo-12b",
2365 "name": "TheDrummer: UnslopNemo 12B",
2366 "cost_per_1m_in": 0.4,
2367 "cost_per_1m_out": 0.4,
2368 "cost_per_1m_in_cached": 0,
2369 "cost_per_1m_out_cached": 0,
2370 "context_window": 32768,
2371 "default_max_tokens": 3276,
2372 "can_reason": false,
2373 "has_reasoning_efforts": false,
2374 "supports_attachments": false
2375 },
2376 {
2377 "id": "alibaba/tongyi-deepresearch-30b-a3b",
2378 "name": "Tongyi DeepResearch 30B A3B",
2379 "cost_per_1m_in": 0.09,
2380 "cost_per_1m_out": 0.4,
2381 "cost_per_1m_in_cached": 0,
2382 "cost_per_1m_out_cached": 0,
2383 "context_window": 131072,
2384 "default_max_tokens": 65536,
2385 "can_reason": true,
2386 "has_reasoning_efforts": true,
2387 "supports_attachments": false
2388 },
2389 {
2390 "id": "alibaba/tongyi-deepresearch-30b-a3b:free",
2391 "name": "Tongyi DeepResearch 30B A3B (free)",
2392 "cost_per_1m_in": 0,
2393 "cost_per_1m_out": 0,
2394 "cost_per_1m_in_cached": 0,
2395 "cost_per_1m_out_cached": 0,
2396 "context_window": 131072,
2397 "default_max_tokens": 65536,
2398 "can_reason": true,
2399 "has_reasoning_efforts": true,
2400 "supports_attachments": false
2401 },
2402 {
2403 "id": "z-ai/glm-4-32b",
2404 "name": "Z.AI: GLM 4 32B",
2405 "cost_per_1m_in": 0.1,
2406 "cost_per_1m_out": 0.1,
2407 "cost_per_1m_in_cached": 0,
2408 "cost_per_1m_out_cached": 0,
2409 "context_window": 128000,
2410 "default_max_tokens": 12800,
2411 "can_reason": false,
2412 "has_reasoning_efforts": false,
2413 "supports_attachments": false
2414 },
2415 {
2416 "id": "z-ai/glm-4.5",
2417 "name": "Z.AI: GLM 4.5",
2418 "cost_per_1m_in": 0.35,
2419 "cost_per_1m_out": 1.55,
2420 "cost_per_1m_in_cached": 0,
2421 "cost_per_1m_out_cached": 0,
2422 "context_window": 131072,
2423 "default_max_tokens": 65536,
2424 "can_reason": true,
2425 "has_reasoning_efforts": true,
2426 "supports_attachments": false
2427 },
2428 {
2429 "id": "z-ai/glm-4.5-air",
2430 "name": "Z.AI: GLM 4.5 Air",
2431 "cost_per_1m_in": 0.2,
2432 "cost_per_1m_out": 1.2,
2433 "cost_per_1m_in_cached": 0,
2434 "cost_per_1m_out_cached": 0,
2435 "context_window": 131072,
2436 "default_max_tokens": 13107,
2437 "can_reason": true,
2438 "has_reasoning_efforts": true,
2439 "supports_attachments": false
2440 },
2441 {
2442 "id": "z-ai/glm-4.5-air:free",
2443 "name": "Z.AI: GLM 4.5 Air (free)",
2444 "cost_per_1m_in": 0,
2445 "cost_per_1m_out": 0,
2446 "cost_per_1m_in_cached": 0,
2447 "cost_per_1m_out_cached": 0,
2448 "context_window": 131072,
2449 "default_max_tokens": 48000,
2450 "can_reason": true,
2451 "has_reasoning_efforts": true,
2452 "supports_attachments": false
2453 },
2454 {
2455 "id": "z-ai/glm-4.5v",
2456 "name": "Z.AI: GLM 4.5V",
2457 "cost_per_1m_in": 0.6,
2458 "cost_per_1m_out": 1.8,
2459 "cost_per_1m_in_cached": 0,
2460 "cost_per_1m_out_cached": 0.11,
2461 "context_window": 65536,
2462 "default_max_tokens": 8192,
2463 "can_reason": true,
2464 "has_reasoning_efforts": true,
2465 "supports_attachments": true
2466 },
2467 {
2468 "id": "z-ai/glm-4.6",
2469 "name": "Z.AI: GLM 4.6",
2470 "cost_per_1m_in": 0.6,
2471 "cost_per_1m_out": 2.2,
2472 "cost_per_1m_in_cached": 0,
2473 "cost_per_1m_out_cached": 0.11,
2474 "context_window": 204800,
2475 "default_max_tokens": 65536,
2476 "can_reason": true,
2477 "has_reasoning_efforts": true,
2478 "supports_attachments": false
2479 },
2480 {
2481 "id": "z-ai/glm-4.6:exacto",
2482 "name": "Z.AI: GLM 4.6 (exacto)",
2483 "cost_per_1m_in": 0.6,
2484 "cost_per_1m_out": 1.9,
2485 "cost_per_1m_in_cached": 0,
2486 "cost_per_1m_out_cached": 0,
2487 "context_window": 202752,
2488 "default_max_tokens": 20275,
2489 "can_reason": true,
2490 "has_reasoning_efforts": true,
2491 "supports_attachments": false
2492 },
2493 {
2494 "id": "inclusionai/ling-1t",
2495 "name": "inclusionAI: Ling-1T",
2496 "cost_per_1m_in": 0.57,
2497 "cost_per_1m_out": 2.28,
2498 "cost_per_1m_in_cached": 0,
2499 "cost_per_1m_out_cached": 0,
2500 "context_window": 131072,
2501 "default_max_tokens": 65536,
2502 "can_reason": false,
2503 "has_reasoning_efforts": false,
2504 "supports_attachments": false
2505 },
2506 {
2507 "id": "inclusionai/ring-1t",
2508 "name": "inclusionAI: Ring 1T",
2509 "cost_per_1m_in": 0.57,
2510 "cost_per_1m_out": 2.28,
2511 "cost_per_1m_in_cached": 0,
2512 "cost_per_1m_out_cached": 0,
2513 "context_window": 131072,
2514 "default_max_tokens": 65536,
2515 "can_reason": true,
2516 "has_reasoning_efforts": true,
2517 "supports_attachments": false
2518 },
2519 {
2520 "id": "x-ai/grok-3",
2521 "name": "xAI: Grok 3",
2522 "cost_per_1m_in": 5,
2523 "cost_per_1m_out": 25,
2524 "cost_per_1m_in_cached": 0,
2525 "cost_per_1m_out_cached": 1.25,
2526 "context_window": 131072,
2527 "default_max_tokens": 13107,
2528 "can_reason": false,
2529 "has_reasoning_efforts": false,
2530 "supports_attachments": false
2531 },
2532 {
2533 "id": "x-ai/grok-3-beta",
2534 "name": "xAI: Grok 3 Beta",
2535 "cost_per_1m_in": 5,
2536 "cost_per_1m_out": 25,
2537 "cost_per_1m_in_cached": 0,
2538 "cost_per_1m_out_cached": 1.25,
2539 "context_window": 131072,
2540 "default_max_tokens": 13107,
2541 "can_reason": false,
2542 "has_reasoning_efforts": false,
2543 "supports_attachments": false
2544 },
2545 {
2546 "id": "x-ai/grok-3-mini",
2547 "name": "xAI: Grok 3 Mini",
2548 "cost_per_1m_in": 0.3,
2549 "cost_per_1m_out": 0.5,
2550 "cost_per_1m_in_cached": 0,
2551 "cost_per_1m_out_cached": 0.075,
2552 "context_window": 131072,
2553 "default_max_tokens": 13107,
2554 "can_reason": true,
2555 "has_reasoning_efforts": true,
2556 "supports_attachments": false
2557 },
2558 {
2559 "id": "x-ai/grok-3-mini-beta",
2560 "name": "xAI: Grok 3 Mini Beta",
2561 "cost_per_1m_in": 0.3,
2562 "cost_per_1m_out": 0.5,
2563 "cost_per_1m_in_cached": 0,
2564 "cost_per_1m_out_cached": 0.075,
2565 "context_window": 131072,
2566 "default_max_tokens": 13107,
2567 "can_reason": true,
2568 "has_reasoning_efforts": true,
2569 "supports_attachments": false
2570 },
2571 {
2572 "id": "x-ai/grok-4",
2573 "name": "xAI: Grok 4",
2574 "cost_per_1m_in": 3,
2575 "cost_per_1m_out": 15,
2576 "cost_per_1m_in_cached": 0,
2577 "cost_per_1m_out_cached": 0.75,
2578 "context_window": 256000,
2579 "default_max_tokens": 25600,
2580 "can_reason": true,
2581 "has_reasoning_efforts": true,
2582 "supports_attachments": true
2583 },
2584 {
2585 "id": "x-ai/grok-4-fast",
2586 "name": "xAI: Grok 4 Fast",
2587 "cost_per_1m_in": 0.2,
2588 "cost_per_1m_out": 0.5,
2589 "cost_per_1m_in_cached": 0,
2590 "cost_per_1m_out_cached": 0.05,
2591 "context_window": 2000000,
2592 "default_max_tokens": 15000,
2593 "can_reason": true,
2594 "has_reasoning_efforts": true,
2595 "supports_attachments": true
2596 },
2597 {
2598 "id": "x-ai/grok-code-fast-1",
2599 "name": "xAI: Grok Code Fast 1",
2600 "cost_per_1m_in": 0.2,
2601 "cost_per_1m_out": 1.5,
2602 "cost_per_1m_in_cached": 0,
2603 "cost_per_1m_out_cached": 0.02,
2604 "context_window": 256000,
2605 "default_max_tokens": 5000,
2606 "can_reason": true,
2607 "has_reasoning_efforts": true,
2608 "supports_attachments": false
2609 }
2610 ],
2611 "default_headers": {
2612 "HTTP-Referer": "https://charm.land",
2613 "X-Title": "Crush"
2614 }
2615}