1{
2 "name": "Venice AI",
3 "id": "venice",
4 "api_key": "$VENICE_API_KEY",
5 "api_endpoint": "https://api.venice.ai/api/v1",
6 "type": "openai-compat",
7 "default_large_model_id": "claude-opus-4-6-fast",
8 "default_small_model_id": "deepseek-v4-flash",
9 "models": [
10 {
11 "id": "claude-opus-4-5",
12 "name": "Claude Opus 4.5",
13 "cost_per_1m_in": 6,
14 "cost_per_1m_out": 30,
15 "cost_per_1m_in_cached": 0,
16 "cost_per_1m_out_cached": 0,
17 "context_window": 198000,
18 "default_max_tokens": 32768,
19 "can_reason": true,
20 "reasoning_levels": [
21 "low",
22 "medium",
23 "high"
24 ],
25 "default_reasoning_effort": "medium",
26 "supports_attachments": true
27 },
28 {
29 "id": "claude-opus-4-6",
30 "name": "Claude Opus 4.6",
31 "cost_per_1m_in": 6,
32 "cost_per_1m_out": 30,
33 "cost_per_1m_in_cached": 0,
34 "cost_per_1m_out_cached": 0,
35 "context_window": 1000000,
36 "default_max_tokens": 128000,
37 "can_reason": true,
38 "reasoning_levels": [
39 "low",
40 "medium",
41 "high"
42 ],
43 "default_reasoning_effort": "medium",
44 "supports_attachments": true
45 },
46 {
47 "id": "claude-opus-4-6-fast",
48 "name": "Claude Opus 4.6 Fast",
49 "cost_per_1m_in": 36,
50 "cost_per_1m_out": 180,
51 "cost_per_1m_in_cached": 0,
52 "cost_per_1m_out_cached": 0,
53 "context_window": 1000000,
54 "default_max_tokens": 128000,
55 "can_reason": true,
56 "reasoning_levels": [
57 "low",
58 "medium",
59 "high"
60 ],
61 "default_reasoning_effort": "medium",
62 "supports_attachments": true
63 },
64 {
65 "id": "claude-opus-4-7",
66 "name": "Claude Opus 4.7",
67 "cost_per_1m_in": 6,
68 "cost_per_1m_out": 30,
69 "cost_per_1m_in_cached": 0,
70 "cost_per_1m_out_cached": 0,
71 "context_window": 1000000,
72 "default_max_tokens": 128000,
73 "can_reason": true,
74 "reasoning_levels": [
75 "low",
76 "medium",
77 "high"
78 ],
79 "default_reasoning_effort": "medium",
80 "supports_attachments": true
81 },
82 {
83 "id": "claude-sonnet-4-5",
84 "name": "Claude Sonnet 4.5",
85 "cost_per_1m_in": 3.75,
86 "cost_per_1m_out": 18.75,
87 "cost_per_1m_in_cached": 0,
88 "cost_per_1m_out_cached": 0,
89 "context_window": 198000,
90 "default_max_tokens": 64000,
91 "can_reason": true,
92 "reasoning_levels": [
93 "low",
94 "medium",
95 "high"
96 ],
97 "default_reasoning_effort": "medium",
98 "supports_attachments": true
99 },
100 {
101 "id": "claude-sonnet-4-6",
102 "name": "Claude Sonnet 4.6",
103 "cost_per_1m_in": 3.6,
104 "cost_per_1m_out": 18,
105 "cost_per_1m_in_cached": 0,
106 "cost_per_1m_out_cached": 0,
107 "context_window": 1000000,
108 "default_max_tokens": 64000,
109 "can_reason": true,
110 "reasoning_levels": [
111 "low",
112 "medium",
113 "high"
114 ],
115 "default_reasoning_effort": "medium",
116 "supports_attachments": true
117 },
118 {
119 "id": "deepseek-v3.2",
120 "name": "DeepSeek V3.2",
121 "cost_per_1m_in": 0.33,
122 "cost_per_1m_out": 0.48,
123 "cost_per_1m_in_cached": 0,
124 "cost_per_1m_out_cached": 0,
125 "context_window": 160000,
126 "default_max_tokens": 32768,
127 "can_reason": true,
128 "supports_attachments": false
129 },
130 {
131 "id": "deepseek-v4-flash",
132 "name": "DeepSeek V4 Flash",
133 "cost_per_1m_in": 0.17,
134 "cost_per_1m_out": 0.35,
135 "cost_per_1m_in_cached": 0,
136 "cost_per_1m_out_cached": 0,
137 "context_window": 1000000,
138 "default_max_tokens": 32768,
139 "can_reason": true,
140 "reasoning_levels": [
141 "low",
142 "medium",
143 "high"
144 ],
145 "default_reasoning_effort": "medium",
146 "supports_attachments": false
147 },
148 {
149 "id": "deepseek-v4-pro",
150 "name": "DeepSeek V4 Pro",
151 "cost_per_1m_in": 1.73,
152 "cost_per_1m_out": 3.796,
153 "cost_per_1m_in_cached": 0,
154 "cost_per_1m_out_cached": 0,
155 "context_window": 1000000,
156 "default_max_tokens": 32768,
157 "can_reason": true,
158 "reasoning_levels": [
159 "low",
160 "medium",
161 "high"
162 ],
163 "default_reasoning_effort": "medium",
164 "supports_attachments": false
165 },
166 {
167 "id": "zai-org-glm-4.6",
168 "name": "GLM 4.6",
169 "cost_per_1m_in": 0.85,
170 "cost_per_1m_out": 2.75,
171 "cost_per_1m_in_cached": 0,
172 "cost_per_1m_out_cached": 0,
173 "context_window": 198000,
174 "default_max_tokens": 16384,
175 "can_reason": true,
176 "supports_attachments": false
177 },
178 {
179 "id": "zai-org-glm-4.7",
180 "name": "GLM 4.7",
181 "cost_per_1m_in": 0.55,
182 "cost_per_1m_out": 2.65,
183 "cost_per_1m_in_cached": 0,
184 "cost_per_1m_out_cached": 0,
185 "context_window": 198000,
186 "default_max_tokens": 16384,
187 "can_reason": true,
188 "supports_attachments": false
189 },
190 {
191 "id": "zai-org-glm-4.7-flash",
192 "name": "GLM 4.7 Flash",
193 "cost_per_1m_in": 0.125,
194 "cost_per_1m_out": 0.5,
195 "cost_per_1m_in_cached": 0,
196 "cost_per_1m_out_cached": 0,
197 "context_window": 128000,
198 "default_max_tokens": 16384,
199 "can_reason": true,
200 "reasoning_levels": [
201 "low",
202 "medium",
203 "high"
204 ],
205 "default_reasoning_effort": "medium",
206 "supports_attachments": false
207 },
208 {
209 "id": "olafangensan-glm-4.7-flash-heretic",
210 "name": "GLM 4.7 Flash Heretic",
211 "cost_per_1m_in": 0.14,
212 "cost_per_1m_out": 0.8,
213 "cost_per_1m_in_cached": 0,
214 "cost_per_1m_out_cached": 0,
215 "context_window": 200000,
216 "default_max_tokens": 24000,
217 "can_reason": true,
218 "supports_attachments": false
219 },
220 {
221 "id": "zai-org-glm-5",
222 "name": "GLM 5",
223 "cost_per_1m_in": 1,
224 "cost_per_1m_out": 3.2,
225 "cost_per_1m_in_cached": 0,
226 "cost_per_1m_out_cached": 0,
227 "context_window": 198000,
228 "default_max_tokens": 32000,
229 "can_reason": true,
230 "supports_attachments": false
231 },
232 {
233 "id": "z-ai-glm-5-turbo",
234 "name": "GLM 5 Turbo",
235 "cost_per_1m_in": 1.2,
236 "cost_per_1m_out": 4,
237 "cost_per_1m_in_cached": 0,
238 "cost_per_1m_out_cached": 0,
239 "context_window": 200000,
240 "default_max_tokens": 32768,
241 "can_reason": true,
242 "reasoning_levels": [
243 "low",
244 "medium",
245 "high"
246 ],
247 "default_reasoning_effort": "medium",
248 "supports_attachments": false
249 },
250 {
251 "id": "zai-org-glm-5-1",
252 "name": "GLM 5.1",
253 "cost_per_1m_in": 1.75,
254 "cost_per_1m_out": 5.5,
255 "cost_per_1m_in_cached": 0,
256 "cost_per_1m_out_cached": 0,
257 "context_window": 200000,
258 "default_max_tokens": 24000,
259 "can_reason": true,
260 "supports_attachments": false
261 },
262 {
263 "id": "z-ai-glm-5v-turbo",
264 "name": "GLM 5V Turbo",
265 "cost_per_1m_in": 1.5,
266 "cost_per_1m_out": 5,
267 "cost_per_1m_in_cached": 0,
268 "cost_per_1m_out_cached": 0,
269 "context_window": 200000,
270 "default_max_tokens": 32768,
271 "can_reason": true,
272 "reasoning_levels": [
273 "low",
274 "medium",
275 "high"
276 ],
277 "default_reasoning_effort": "medium",
278 "supports_attachments": true
279 },
280 {
281 "id": "openai-gpt-4o-2024-11-20",
282 "name": "GPT-4o",
283 "cost_per_1m_in": 3.125,
284 "cost_per_1m_out": 12.5,
285 "cost_per_1m_in_cached": 0,
286 "cost_per_1m_out_cached": 0,
287 "context_window": 128000,
288 "default_max_tokens": 16384,
289 "can_reason": false,
290 "supports_attachments": true
291 },
292 {
293 "id": "openai-gpt-4o-mini-2024-07-18",
294 "name": "GPT-4o Mini",
295 "cost_per_1m_in": 0.1875,
296 "cost_per_1m_out": 0.75,
297 "cost_per_1m_in_cached": 0,
298 "cost_per_1m_out_cached": 0,
299 "context_window": 128000,
300 "default_max_tokens": 16384,
301 "can_reason": false,
302 "supports_attachments": true
303 },
304 {
305 "id": "openai-gpt-52",
306 "name": "GPT-5.2",
307 "cost_per_1m_in": 2.19,
308 "cost_per_1m_out": 17.5,
309 "cost_per_1m_in_cached": 0,
310 "cost_per_1m_out_cached": 0,
311 "context_window": 256000,
312 "default_max_tokens": 65536,
313 "can_reason": true,
314 "reasoning_levels": [
315 "low",
316 "medium",
317 "high"
318 ],
319 "default_reasoning_effort": "medium",
320 "supports_attachments": false
321 },
322 {
323 "id": "openai-gpt-52-codex",
324 "name": "GPT-5.2 Codex",
325 "cost_per_1m_in": 2.19,
326 "cost_per_1m_out": 17.5,
327 "cost_per_1m_in_cached": 0,
328 "cost_per_1m_out_cached": 0,
329 "context_window": 256000,
330 "default_max_tokens": 65536,
331 "can_reason": true,
332 "reasoning_levels": [
333 "low",
334 "medium",
335 "high"
336 ],
337 "default_reasoning_effort": "medium",
338 "supports_attachments": true
339 },
340 {
341 "id": "openai-gpt-53-codex",
342 "name": "GPT-5.3 Codex",
343 "cost_per_1m_in": 2.19,
344 "cost_per_1m_out": 17.5,
345 "cost_per_1m_in_cached": 0,
346 "cost_per_1m_out_cached": 0,
347 "context_window": 400000,
348 "default_max_tokens": 128000,
349 "can_reason": true,
350 "reasoning_levels": [
351 "low",
352 "medium",
353 "high"
354 ],
355 "default_reasoning_effort": "medium",
356 "supports_attachments": true
357 },
358 {
359 "id": "openai-gpt-54",
360 "name": "GPT-5.4",
361 "cost_per_1m_in": 3.13,
362 "cost_per_1m_out": 18.8,
363 "cost_per_1m_in_cached": 0,
364 "cost_per_1m_out_cached": 0,
365 "context_window": 1000000,
366 "default_max_tokens": 131072,
367 "can_reason": true,
368 "reasoning_levels": [
369 "low",
370 "medium",
371 "high"
372 ],
373 "default_reasoning_effort": "medium",
374 "supports_attachments": true
375 },
376 {
377 "id": "openai-gpt-54-mini",
378 "name": "GPT-5.4 Mini",
379 "cost_per_1m_in": 0.9375,
380 "cost_per_1m_out": 5.625,
381 "cost_per_1m_in_cached": 0,
382 "cost_per_1m_out_cached": 0,
383 "context_window": 400000,
384 "default_max_tokens": 128000,
385 "can_reason": true,
386 "reasoning_levels": [
387 "low",
388 "medium",
389 "high"
390 ],
391 "default_reasoning_effort": "medium",
392 "supports_attachments": true
393 },
394 {
395 "id": "openai-gpt-54-pro",
396 "name": "GPT-5.4 Pro",
397 "cost_per_1m_in": 37.5,
398 "cost_per_1m_out": 225,
399 "cost_per_1m_in_cached": 0,
400 "cost_per_1m_out_cached": 0,
401 "context_window": 1000000,
402 "default_max_tokens": 128000,
403 "can_reason": true,
404 "reasoning_levels": [
405 "low",
406 "medium",
407 "high"
408 ],
409 "default_reasoning_effort": "medium",
410 "supports_attachments": true
411 },
412 {
413 "id": "openai-gpt-55",
414 "name": "GPT-5.5",
415 "cost_per_1m_in": 6.25,
416 "cost_per_1m_out": 37.5,
417 "cost_per_1m_in_cached": 0,
418 "cost_per_1m_out_cached": 0,
419 "context_window": 1000000,
420 "default_max_tokens": 131072,
421 "can_reason": true,
422 "reasoning_levels": [
423 "low",
424 "medium",
425 "high"
426 ],
427 "default_reasoning_effort": "medium",
428 "supports_attachments": true
429 },
430 {
431 "id": "openai-gpt-55-pro",
432 "name": "GPT-5.5 Pro",
433 "cost_per_1m_in": 37.5,
434 "cost_per_1m_out": 225,
435 "cost_per_1m_in_cached": 0,
436 "cost_per_1m_out_cached": 0,
437 "context_window": 1000000,
438 "default_max_tokens": 128000,
439 "can_reason": true,
440 "reasoning_levels": [
441 "low",
442 "medium",
443 "high"
444 ],
445 "default_reasoning_effort": "medium",
446 "supports_attachments": true
447 },
448 {
449 "id": "gemini-3-flash-preview",
450 "name": "Gemini 3 Flash Preview",
451 "cost_per_1m_in": 0.7,
452 "cost_per_1m_out": 3.75,
453 "cost_per_1m_in_cached": 0,
454 "cost_per_1m_out_cached": 0,
455 "context_window": 256000,
456 "default_max_tokens": 65536,
457 "can_reason": true,
458 "reasoning_levels": [
459 "low",
460 "medium",
461 "high"
462 ],
463 "default_reasoning_effort": "medium",
464 "supports_attachments": true
465 },
466 {
467 "id": "gemini-3-1-pro-preview",
468 "name": "Gemini 3.1 Pro Preview",
469 "cost_per_1m_in": 2.5,
470 "cost_per_1m_out": 15,
471 "cost_per_1m_in_cached": 0,
472 "cost_per_1m_out_cached": 0,
473 "context_window": 1000000,
474 "default_max_tokens": 32768,
475 "can_reason": true,
476 "reasoning_levels": [
477 "low",
478 "medium",
479 "high"
480 ],
481 "default_reasoning_effort": "medium",
482 "supports_attachments": true
483 },
484 {
485 "id": "gemma-4-uncensored",
486 "name": "Gemma 4 Uncensored",
487 "cost_per_1m_in": 0.1625,
488 "cost_per_1m_out": 0.5,
489 "cost_per_1m_in_cached": 0,
490 "cost_per_1m_out_cached": 0,
491 "context_window": 256000,
492 "default_max_tokens": 8192,
493 "can_reason": false,
494 "supports_attachments": true
495 },
496 {
497 "id": "google-gemma-3-27b-it",
498 "name": "Google Gemma 3 27B Instruct",
499 "cost_per_1m_in": 0.12,
500 "cost_per_1m_out": 0.2,
501 "cost_per_1m_in_cached": 0,
502 "cost_per_1m_out_cached": 0,
503 "context_window": 198000,
504 "default_max_tokens": 16384,
505 "can_reason": false,
506 "supports_attachments": true
507 },
508 {
509 "id": "google-gemma-4-26b-a4b-it",
510 "name": "Google Gemma 4 26B A4B Instruct",
511 "cost_per_1m_in": 0.1625,
512 "cost_per_1m_out": 0.5,
513 "cost_per_1m_in_cached": 0,
514 "cost_per_1m_out_cached": 0,
515 "context_window": 256000,
516 "default_max_tokens": 8192,
517 "can_reason": true,
518 "supports_attachments": true
519 },
520 {
521 "id": "google-gemma-4-31b-it",
522 "name": "Google Gemma 4 31B Instruct",
523 "cost_per_1m_in": 0.175,
524 "cost_per_1m_out": 0.5,
525 "cost_per_1m_in_cached": 0,
526 "cost_per_1m_out_cached": 0,
527 "context_window": 256000,
528 "default_max_tokens": 8192,
529 "can_reason": true,
530 "supports_attachments": true
531 },
532 {
533 "id": "grok-41-fast",
534 "name": "Grok 4.1 Fast",
535 "cost_per_1m_in": 0.23,
536 "cost_per_1m_out": 0.57,
537 "cost_per_1m_in_cached": 0,
538 "cost_per_1m_out_cached": 0,
539 "context_window": 1000000,
540 "default_max_tokens": 30000,
541 "can_reason": true,
542 "supports_attachments": true
543 },
544 {
545 "id": "grok-4-20",
546 "name": "Grok 4.20",
547 "cost_per_1m_in": 2.27,
548 "cost_per_1m_out": 6.8,
549 "cost_per_1m_in_cached": 0,
550 "cost_per_1m_out_cached": 0,
551 "context_window": 2000000,
552 "default_max_tokens": 128000,
553 "can_reason": true,
554 "supports_attachments": true
555 },
556 {
557 "id": "grok-4-3",
558 "name": "Grok 4.3",
559 "cost_per_1m_in": 1.42,
560 "cost_per_1m_out": 2.83,
561 "cost_per_1m_in_cached": 0,
562 "cost_per_1m_out_cached": 0,
563 "context_window": 1000000,
564 "default_max_tokens": 32000,
565 "can_reason": true,
566 "supports_attachments": true
567 },
568 {
569 "id": "kimi-k2-thinking",
570 "name": "Kimi K2 Thinking",
571 "cost_per_1m_in": 0.75,
572 "cost_per_1m_out": 3.2,
573 "cost_per_1m_in_cached": 0,
574 "cost_per_1m_out_cached": 0,
575 "context_window": 256000,
576 "default_max_tokens": 65536,
577 "can_reason": true,
578 "reasoning_levels": [
579 "low",
580 "medium",
581 "high"
582 ],
583 "default_reasoning_effort": "medium",
584 "supports_attachments": false
585 },
586 {
587 "id": "kimi-k2-5",
588 "name": "Kimi K2.5",
589 "cost_per_1m_in": 0.56,
590 "cost_per_1m_out": 3.5,
591 "cost_per_1m_in_cached": 0,
592 "cost_per_1m_out_cached": 0,
593 "context_window": 256000,
594 "default_max_tokens": 65536,
595 "can_reason": true,
596 "reasoning_levels": [
597 "low",
598 "medium",
599 "high"
600 ],
601 "default_reasoning_effort": "medium",
602 "supports_attachments": true
603 },
604 {
605 "id": "kimi-k2-6",
606 "name": "Kimi K2.6",
607 "cost_per_1m_in": 0.85,
608 "cost_per_1m_out": 4.655,
609 "cost_per_1m_in_cached": 0,
610 "cost_per_1m_out_cached": 0,
611 "context_window": 256000,
612 "default_max_tokens": 65536,
613 "can_reason": true,
614 "reasoning_levels": [
615 "low",
616 "medium",
617 "high"
618 ],
619 "default_reasoning_effort": "medium",
620 "supports_attachments": true
621 },
622 {
623 "id": "llama-3.2-3b",
624 "name": "Llama 3.2 3B",
625 "cost_per_1m_in": 0.15,
626 "cost_per_1m_out": 0.6,
627 "cost_per_1m_in_cached": 0,
628 "cost_per_1m_out_cached": 0,
629 "context_window": 128000,
630 "default_max_tokens": 4096,
631 "can_reason": false,
632 "supports_attachments": false
633 },
634 {
635 "id": "llama-3.3-70b",
636 "name": "Llama 3.3 70B",
637 "cost_per_1m_in": 0.7,
638 "cost_per_1m_out": 2.8,
639 "cost_per_1m_in_cached": 0,
640 "cost_per_1m_out_cached": 0,
641 "context_window": 128000,
642 "default_max_tokens": 4096,
643 "can_reason": false,
644 "supports_attachments": false
645 },
646 {
647 "id": "mercury-2",
648 "name": "Mercury 2",
649 "cost_per_1m_in": 0.3125,
650 "cost_per_1m_out": 0.9375,
651 "cost_per_1m_in_cached": 0,
652 "cost_per_1m_out_cached": 0,
653 "context_window": 128000,
654 "default_max_tokens": 50000,
655 "can_reason": true,
656 "reasoning_levels": [
657 "low",
658 "medium",
659 "high"
660 ],
661 "default_reasoning_effort": "medium",
662 "supports_attachments": false
663 },
664 {
665 "id": "minimax-m25",
666 "name": "MiniMax M2.5",
667 "cost_per_1m_in": 0.34,
668 "cost_per_1m_out": 1.19,
669 "cost_per_1m_in_cached": 0,
670 "cost_per_1m_out_cached": 0,
671 "context_window": 198000,
672 "default_max_tokens": 32768,
673 "can_reason": true,
674 "reasoning_levels": [
675 "low",
676 "medium",
677 "high"
678 ],
679 "default_reasoning_effort": "medium",
680 "supports_attachments": false
681 },
682 {
683 "id": "minimax-m27",
684 "name": "MiniMax M2.7",
685 "cost_per_1m_in": 0.375,
686 "cost_per_1m_out": 1.5,
687 "cost_per_1m_in_cached": 0,
688 "cost_per_1m_out_cached": 0,
689 "context_window": 198000,
690 "default_max_tokens": 32768,
691 "can_reason": true,
692 "reasoning_levels": [
693 "low",
694 "medium",
695 "high"
696 ],
697 "default_reasoning_effort": "medium",
698 "supports_attachments": false
699 },
700 {
701 "id": "mistral-small-3-2-24b-instruct",
702 "name": "Mistral Small 3.2 24B Instruct",
703 "cost_per_1m_in": 0.09375,
704 "cost_per_1m_out": 0.25,
705 "cost_per_1m_in_cached": 0,
706 "cost_per_1m_out_cached": 0,
707 "context_window": 256000,
708 "default_max_tokens": 16384,
709 "can_reason": false,
710 "supports_attachments": false
711 },
712 {
713 "id": "mistral-small-2603",
714 "name": "Mistral Small 4",
715 "cost_per_1m_in": 0.1875,
716 "cost_per_1m_out": 0.75,
717 "cost_per_1m_in_cached": 0,
718 "cost_per_1m_out_cached": 0,
719 "context_window": 256000,
720 "default_max_tokens": 65536,
721 "can_reason": true,
722 "reasoning_levels": [
723 "low",
724 "medium",
725 "high"
726 ],
727 "default_reasoning_effort": "medium",
728 "supports_attachments": true
729 },
730 {
731 "id": "nvidia-nemotron-3-nano-30b-a3b",
732 "name": "NVIDIA Nemotron 3 Nano 30B",
733 "cost_per_1m_in": 0.075,
734 "cost_per_1m_out": 0.3,
735 "cost_per_1m_in_cached": 0,
736 "cost_per_1m_out_cached": 0,
737 "context_window": 128000,
738 "default_max_tokens": 16384,
739 "can_reason": false,
740 "supports_attachments": false
741 },
742 {
743 "id": "nvidia-nemotron-cascade-2-30b-a3b",
744 "name": "Nemotron Cascade 2 30B A3B",
745 "cost_per_1m_in": 0.14,
746 "cost_per_1m_out": 0.8,
747 "cost_per_1m_in_cached": 0,
748 "cost_per_1m_out_cached": 0,
749 "context_window": 256000,
750 "default_max_tokens": 32768,
751 "can_reason": true,
752 "supports_attachments": false
753 },
754 {
755 "id": "openai-gpt-oss-120b",
756 "name": "OpenAI GPT OSS 120B",
757 "cost_per_1m_in": 0.07,
758 "cost_per_1m_out": 0.3,
759 "cost_per_1m_in_cached": 0,
760 "cost_per_1m_out_cached": 0,
761 "context_window": 128000,
762 "default_max_tokens": 16384,
763 "can_reason": false,
764 "supports_attachments": false
765 },
766 {
767 "id": "qwen3-235b-a22b-instruct-2507",
768 "name": "Qwen 3 235B A22B Instruct 2507",
769 "cost_per_1m_in": 0.15,
770 "cost_per_1m_out": 0.75,
771 "cost_per_1m_in_cached": 0,
772 "cost_per_1m_out_cached": 0,
773 "context_window": 128000,
774 "default_max_tokens": 16384,
775 "can_reason": false,
776 "supports_attachments": false
777 },
778 {
779 "id": "qwen3-235b-a22b-thinking-2507",
780 "name": "Qwen 3 235B A22B Thinking 2507",
781 "cost_per_1m_in": 0.45,
782 "cost_per_1m_out": 3.5,
783 "cost_per_1m_in_cached": 0,
784 "cost_per_1m_out_cached": 0,
785 "context_window": 128000,
786 "default_max_tokens": 16384,
787 "can_reason": true,
788 "supports_attachments": false
789 },
790 {
791 "id": "qwen3-coder-480b-a35b-instruct-turbo",
792 "name": "Qwen 3 Coder 480B Turbo",
793 "cost_per_1m_in": 0.35,
794 "cost_per_1m_out": 1.5,
795 "cost_per_1m_in_cached": 0,
796 "cost_per_1m_out_cached": 0,
797 "context_window": 256000,
798 "default_max_tokens": 65536,
799 "can_reason": false,
800 "supports_attachments": false
801 },
802 {
803 "id": "qwen3-coder-480b-a35b-instruct",
      "name": "Qwen 3 Coder 480B",
805 "cost_per_1m_in": 0.75,
806 "cost_per_1m_out": 3,
807 "cost_per_1m_in_cached": 0,
808 "cost_per_1m_out_cached": 0,
809 "context_window": 256000,
810 "default_max_tokens": 65536,
811 "can_reason": false,
812 "supports_attachments": false
813 },
814 {
815 "id": "qwen3-next-80b",
      "name": "Qwen 3 Next 80B",
817 "cost_per_1m_in": 0.35,
818 "cost_per_1m_out": 1.9,
819 "cost_per_1m_in_cached": 0,
820 "cost_per_1m_out_cached": 0,
821 "context_window": 256000,
822 "default_max_tokens": 16384,
823 "can_reason": false,
824 "supports_attachments": false
825 },
826 {
827 "id": "qwen3-5-35b-a3b",
828 "name": "Qwen 3.5 35B A3B",
829 "cost_per_1m_in": 0.3125,
830 "cost_per_1m_out": 1.25,
831 "cost_per_1m_in_cached": 0,
832 "cost_per_1m_out_cached": 0,
833 "context_window": 256000,
834 "default_max_tokens": 65536,
835 "can_reason": true,
836 "supports_attachments": true,
837 "options": {
838 "temperature": 1,
839 "top_p": 0.95
840 }
841 },
842 {
843 "id": "qwen3-5-397b-a17b",
844 "name": "Qwen 3.5 397B",
845 "cost_per_1m_in": 0.75,
846 "cost_per_1m_out": 4.5,
847 "cost_per_1m_in_cached": 0,
848 "cost_per_1m_out_cached": 0,
849 "context_window": 128000,
850 "default_max_tokens": 32768,
851 "can_reason": true,
852 "reasoning_levels": [
853 "low",
854 "medium",
855 "high"
856 ],
857 "default_reasoning_effort": "medium",
858 "supports_attachments": true
859 },
860 {
861 "id": "qwen3-5-9b",
862 "name": "Qwen 3.5 9B",
863 "cost_per_1m_in": 0.1,
864 "cost_per_1m_out": 0.15,
865 "cost_per_1m_in_cached": 0,
866 "cost_per_1m_out_cached": 0,
867 "context_window": 256000,
868 "default_max_tokens": 32768,
869 "can_reason": true,
870 "supports_attachments": true
871 },
872 {
873 "id": "qwen3-6-27b",
874 "name": "Qwen 3.6 27B",
875 "cost_per_1m_in": 0.325,
876 "cost_per_1m_out": 3.25,
877 "cost_per_1m_in_cached": 0,
878 "cost_per_1m_out_cached": 0,
879 "context_window": 256000,
880 "default_max_tokens": 65536,
881 "can_reason": true,
882 "supports_attachments": true,
883 "options": {
884 "temperature": 1,
885 "top_p": 0.95
886 }
887 },
888 {
889 "id": "qwen-3-6-plus",
890 "name": "Qwen 3.6 Plus Uncensored",
891 "cost_per_1m_in": 0.625,
892 "cost_per_1m_out": 3.75,
893 "cost_per_1m_in_cached": 0,
894 "cost_per_1m_out_cached": 0,
895 "context_window": 1000000,
896 "default_max_tokens": 65536,
897 "can_reason": true,
898 "supports_attachments": true,
899 "options": {
900 "temperature": 0.7,
901 "top_p": 0.8
902 }
903 },
904 {
905 "id": "e2ee-qwen3-30b-a3b-p",
906 "name": "Qwen3 30B A3B",
907 "cost_per_1m_in": 0.19,
908 "cost_per_1m_out": 0.69,
909 "cost_per_1m_in_cached": 0,
910 "cost_per_1m_out_cached": 0,
911 "context_window": 256000,
912 "default_max_tokens": 32768,
913 "can_reason": false,
914 "supports_attachments": false
915 },
916 {
917 "id": "qwen3-vl-235b-a22b",
918 "name": "Qwen3 VL 235B",
919 "cost_per_1m_in": 0.25,
920 "cost_per_1m_out": 1.5,
921 "cost_per_1m_in_cached": 0,
922 "cost_per_1m_out_cached": 0,
923 "context_window": 256000,
924 "default_max_tokens": 16384,
925 "can_reason": false,
926 "supports_attachments": true
927 },
928 {
929 "id": "e2ee-qwen3-vl-30b-a3b-p",
930 "name": "Qwen3 VL 30B A3B",
931 "cost_per_1m_in": 0.25,
932 "cost_per_1m_out": 0.9,
933 "cost_per_1m_in_cached": 0,
934 "cost_per_1m_out_cached": 0,
935 "context_window": 128000,
936 "default_max_tokens": 4096,
937 "can_reason": false,
938 "supports_attachments": true
939 },
940 {
941 "id": "e2ee-qwen3-5-122b-a10b",
942 "name": "Qwen3.5 122B A10B",
943 "cost_per_1m_in": 0.5,
944 "cost_per_1m_out": 4,
945 "cost_per_1m_in_cached": 0,
946 "cost_per_1m_out_cached": 0,
947 "context_window": 128000,
948 "default_max_tokens": 32768,
949 "can_reason": true,
950 "supports_attachments": true
951 },
952 {
953 "id": "arcee-trinity-large-thinking",
954 "name": "Trinity Large Thinking",
955 "cost_per_1m_in": 0.3125,
956 "cost_per_1m_out": 1.125,
957 "cost_per_1m_in_cached": 0,
958 "cost_per_1m_out_cached": 0,
959 "context_window": 256000,
960 "default_max_tokens": 65536,
961 "can_reason": true,
962 "reasoning_levels": [
963 "low",
964 "medium",
965 "high"
966 ],
967 "default_reasoning_effort": "medium",
968 "supports_attachments": false
969 },
970 {
971 "id": "venice-uncensored-role-play",
972 "name": "Venice Role Play Uncensored",
973 "cost_per_1m_in": 0.5,
974 "cost_per_1m_out": 2,
975 "cost_per_1m_in_cached": 0,
976 "cost_per_1m_out_cached": 0,
977 "context_window": 128000,
978 "default_max_tokens": 4096,
979 "can_reason": false,
980 "supports_attachments": true
981 },
982 {
983 "id": "venice-uncensored-1-2",
984 "name": "Venice Uncensored 1.2",
985 "cost_per_1m_in": 0.2,
986 "cost_per_1m_out": 0.9,
987 "cost_per_1m_in_cached": 0,
988 "cost_per_1m_out_cached": 0,
989 "context_window": 128000,
990 "default_max_tokens": 8192,
991 "can_reason": false,
992 "supports_attachments": true
993 }
994 ]
995}