1{
2 "name": "Venice AI",
3 "id": "venice",
4 "api_key": "$VENICE_API_KEY",
5 "api_endpoint": "https://api.venice.ai/api/v1",
6 "type": "openai-compat",
7 "default_large_model_id": "claude-opus-4-6-fast",
8 "default_small_model_id": "mistral-small-2603",
9 "models": [
10 {
11 "id": "claude-opus-4-5",
12 "name": "Claude Opus 4.5",
13 "cost_per_1m_in": 6,
14 "cost_per_1m_out": 30,
15 "cost_per_1m_in_cached": 0,
16 "cost_per_1m_out_cached": 0,
17 "context_window": 198000,
18 "default_max_tokens": 32768,
19 "can_reason": true,
20 "reasoning_levels": [
21 "low",
22 "medium",
23 "high"
24 ],
25 "default_reasoning_effort": "medium",
26 "supports_attachments": true
27 },
28 {
29 "id": "claude-opus-4-6",
30 "name": "Claude Opus 4.6",
31 "cost_per_1m_in": 6,
32 "cost_per_1m_out": 30,
33 "cost_per_1m_in_cached": 0,
34 "cost_per_1m_out_cached": 0,
35 "context_window": 1000000,
36 "default_max_tokens": 128000,
37 "can_reason": true,
38 "reasoning_levels": [
39 "low",
40 "medium",
41 "high"
42 ],
43 "default_reasoning_effort": "medium",
44 "supports_attachments": true
45 },
46 {
47 "id": "claude-opus-4-6-fast",
48 "name": "Claude Opus 4.6 Fast",
49 "cost_per_1m_in": 36,
50 "cost_per_1m_out": 180,
51 "cost_per_1m_in_cached": 0,
52 "cost_per_1m_out_cached": 0,
53 "context_window": 1000000,
54 "default_max_tokens": 128000,
55 "can_reason": true,
56 "reasoning_levels": [
57 "low",
58 "medium",
59 "high"
60 ],
61 "default_reasoning_effort": "medium",
62 "supports_attachments": true
63 },
64 {
65 "id": "claude-opus-4-7",
66 "name": "Claude Opus 4.7",
67 "cost_per_1m_in": 6,
68 "cost_per_1m_out": 30,
69 "cost_per_1m_in_cached": 0,
70 "cost_per_1m_out_cached": 0,
71 "context_window": 1000000,
72 "default_max_tokens": 128000,
73 "can_reason": true,
74 "reasoning_levels": [
75 "low",
76 "medium",
77 "high"
78 ],
79 "default_reasoning_effort": "medium",
80 "supports_attachments": true
81 },
82 {
83 "id": "claude-sonnet-4-5",
84 "name": "Claude Sonnet 4.5",
85 "cost_per_1m_in": 3.75,
86 "cost_per_1m_out": 18.75,
87 "cost_per_1m_in_cached": 0,
88 "cost_per_1m_out_cached": 0,
89 "context_window": 198000,
90 "default_max_tokens": 64000,
91 "can_reason": true,
92 "reasoning_levels": [
93 "low",
94 "medium",
95 "high"
96 ],
97 "default_reasoning_effort": "medium",
98 "supports_attachments": true
99 },
100 {
101 "id": "claude-sonnet-4-6",
102 "name": "Claude Sonnet 4.6",
103 "cost_per_1m_in": 3.6,
104 "cost_per_1m_out": 18,
105 "cost_per_1m_in_cached": 0,
106 "cost_per_1m_out_cached": 0,
107 "context_window": 1000000,
108 "default_max_tokens": 64000,
109 "can_reason": true,
110 "reasoning_levels": [
111 "low",
112 "medium",
113 "high"
114 ],
115 "default_reasoning_effort": "medium",
116 "supports_attachments": true
117 },
118 {
119 "id": "deepseek-v3.2",
120 "name": "DeepSeek V3.2",
121 "cost_per_1m_in": 0.33,
122 "cost_per_1m_out": 0.48,
123 "cost_per_1m_in_cached": 0,
124 "cost_per_1m_out_cached": 0,
125 "context_window": 160000,
126 "default_max_tokens": 32768,
127 "can_reason": true,
128 "supports_attachments": false
129 },
130 {
131 "id": "zai-org-glm-4.6",
132 "name": "GLM 4.6",
133 "cost_per_1m_in": 0.85,
134 "cost_per_1m_out": 2.75,
135 "cost_per_1m_in_cached": 0,
136 "cost_per_1m_out_cached": 0,
137 "context_window": 198000,
138 "default_max_tokens": 16384,
139 "can_reason": true,
140 "supports_attachments": false
141 },
142 {
143 "id": "zai-org-glm-4.7",
144 "name": "GLM 4.7",
145 "cost_per_1m_in": 0.55,
146 "cost_per_1m_out": 2.65,
147 "cost_per_1m_in_cached": 0,
148 "cost_per_1m_out_cached": 0,
149 "context_window": 198000,
150 "default_max_tokens": 16384,
151 "can_reason": true,
152 "supports_attachments": false
153 },
154 {
155 "id": "zai-org-glm-4.7-flash",
156 "name": "GLM 4.7 Flash",
157 "cost_per_1m_in": 0.125,
158 "cost_per_1m_out": 0.5,
159 "cost_per_1m_in_cached": 0,
160 "cost_per_1m_out_cached": 0,
161 "context_window": 128000,
162 "default_max_tokens": 16384,
163 "can_reason": true,
164 "reasoning_levels": [
165 "low",
166 "medium",
167 "high"
168 ],
169 "default_reasoning_effort": "medium",
170 "supports_attachments": false
171 },
172 {
173 "id": "olafangensan-glm-4.7-flash-heretic",
174 "name": "GLM 4.7 Flash Heretic",
175 "cost_per_1m_in": 0.14,
176 "cost_per_1m_out": 0.8,
177 "cost_per_1m_in_cached": 0,
178 "cost_per_1m_out_cached": 0,
179 "context_window": 200000,
180 "default_max_tokens": 24000,
181 "can_reason": true,
182 "supports_attachments": false
183 },
184 {
185 "id": "zai-org-glm-5",
186 "name": "GLM 5",
187 "cost_per_1m_in": 1,
188 "cost_per_1m_out": 3.2,
189 "cost_per_1m_in_cached": 0,
190 "cost_per_1m_out_cached": 0,
191 "context_window": 198000,
192 "default_max_tokens": 32000,
193 "can_reason": true,
194 "supports_attachments": false
195 },
196 {
197 "id": "z-ai-glm-5-turbo",
198 "name": "GLM 5 Turbo",
199 "cost_per_1m_in": 1.2,
200 "cost_per_1m_out": 4,
201 "cost_per_1m_in_cached": 0,
202 "cost_per_1m_out_cached": 0,
203 "context_window": 200000,
204 "default_max_tokens": 32768,
205 "can_reason": true,
206 "reasoning_levels": [
207 "low",
208 "medium",
209 "high"
210 ],
211 "default_reasoning_effort": "medium",
212 "supports_attachments": false
213 },
214 {
215 "id": "zai-org-glm-5-1",
216 "name": "GLM 5.1",
217 "cost_per_1m_in": 1.75,
218 "cost_per_1m_out": 5.5,
219 "cost_per_1m_in_cached": 0,
220 "cost_per_1m_out_cached": 0,
221 "context_window": 200000,
222 "default_max_tokens": 24000,
223 "can_reason": true,
224 "supports_attachments": false
225 },
226 {
227 "id": "z-ai-glm-5v-turbo",
228 "name": "GLM 5V Turbo",
229 "cost_per_1m_in": 1.5,
230 "cost_per_1m_out": 5,
231 "cost_per_1m_in_cached": 0,
232 "cost_per_1m_out_cached": 0,
233 "context_window": 200000,
234 "default_max_tokens": 32768,
235 "can_reason": true,
236 "reasoning_levels": [
237 "low",
238 "medium",
239 "high"
240 ],
241 "default_reasoning_effort": "medium",
242 "supports_attachments": true
243 },
244 {
245 "id": "openai-gpt-4o-2024-11-20",
246 "name": "GPT-4o",
247 "cost_per_1m_in": 3.125,
248 "cost_per_1m_out": 12.5,
249 "cost_per_1m_in_cached": 0,
250 "cost_per_1m_out_cached": 0,
251 "context_window": 128000,
252 "default_max_tokens": 16384,
253 "can_reason": false,
254 "supports_attachments": true
255 },
256 {
257 "id": "openai-gpt-4o-mini-2024-07-18",
258 "name": "GPT-4o Mini",
259 "cost_per_1m_in": 0.1875,
260 "cost_per_1m_out": 0.75,
261 "cost_per_1m_in_cached": 0,
262 "cost_per_1m_out_cached": 0,
263 "context_window": 128000,
264 "default_max_tokens": 16384,
265 "can_reason": false,
266 "supports_attachments": true
267 },
268 {
269 "id": "openai-gpt-52",
270 "name": "GPT-5.2",
271 "cost_per_1m_in": 2.19,
272 "cost_per_1m_out": 17.5,
273 "cost_per_1m_in_cached": 0,
274 "cost_per_1m_out_cached": 0,
275 "context_window": 256000,
276 "default_max_tokens": 65536,
277 "can_reason": true,
278 "reasoning_levels": [
279 "low",
280 "medium",
281 "high"
282 ],
283 "default_reasoning_effort": "medium",
284 "supports_attachments": false
285 },
286 {
287 "id": "openai-gpt-52-codex",
288 "name": "GPT-5.2 Codex",
289 "cost_per_1m_in": 2.19,
290 "cost_per_1m_out": 17.5,
291 "cost_per_1m_in_cached": 0,
292 "cost_per_1m_out_cached": 0,
293 "context_window": 256000,
294 "default_max_tokens": 65536,
295 "can_reason": true,
296 "reasoning_levels": [
297 "low",
298 "medium",
299 "high"
300 ],
301 "default_reasoning_effort": "medium",
302 "supports_attachments": true
303 },
304 {
305 "id": "openai-gpt-53-codex",
306 "name": "GPT-5.3 Codex",
307 "cost_per_1m_in": 2.19,
308 "cost_per_1m_out": 17.5,
309 "cost_per_1m_in_cached": 0,
310 "cost_per_1m_out_cached": 0,
311 "context_window": 400000,
312 "default_max_tokens": 128000,
313 "can_reason": true,
314 "reasoning_levels": [
315 "low",
316 "medium",
317 "high"
318 ],
319 "default_reasoning_effort": "medium",
320 "supports_attachments": true
321 },
322 {
323 "id": "openai-gpt-54",
324 "name": "GPT-5.4",
325 "cost_per_1m_in": 3.13,
326 "cost_per_1m_out": 18.8,
327 "cost_per_1m_in_cached": 0,
328 "cost_per_1m_out_cached": 0,
329 "context_window": 1000000,
330 "default_max_tokens": 131072,
331 "can_reason": true,
332 "reasoning_levels": [
333 "low",
334 "medium",
335 "high"
336 ],
337 "default_reasoning_effort": "medium",
338 "supports_attachments": true
339 },
340 {
341 "id": "openai-gpt-54-mini",
342 "name": "GPT-5.4 Mini",
343 "cost_per_1m_in": 0.9375,
344 "cost_per_1m_out": 5.625,
345 "cost_per_1m_in_cached": 0,
346 "cost_per_1m_out_cached": 0,
347 "context_window": 400000,
348 "default_max_tokens": 128000,
349 "can_reason": true,
350 "reasoning_levels": [
351 "low",
352 "medium",
353 "high"
354 ],
355 "default_reasoning_effort": "medium",
356 "supports_attachments": true
357 },
358 {
359 "id": "openai-gpt-54-pro",
360 "name": "GPT-5.4 Pro",
361 "cost_per_1m_in": 37.5,
362 "cost_per_1m_out": 225,
363 "cost_per_1m_in_cached": 0,
364 "cost_per_1m_out_cached": 0,
365 "context_window": 1000000,
366 "default_max_tokens": 128000,
367 "can_reason": true,
368 "reasoning_levels": [
369 "low",
370 "medium",
371 "high"
372 ],
373 "default_reasoning_effort": "medium",
374 "supports_attachments": true
375 },
376 {
377 "id": "gemini-3-flash-preview",
378 "name": "Gemini 3 Flash Preview",
379 "cost_per_1m_in": 0.7,
380 "cost_per_1m_out": 3.75,
381 "cost_per_1m_in_cached": 0,
382 "cost_per_1m_out_cached": 0,
383 "context_window": 256000,
384 "default_max_tokens": 65536,
385 "can_reason": true,
386 "reasoning_levels": [
387 "low",
388 "medium",
389 "high"
390 ],
391 "default_reasoning_effort": "medium",
392 "supports_attachments": true
393 },
394 {
395 "id": "gemini-3-1-pro-preview",
396 "name": "Gemini 3.1 Pro Preview",
397 "cost_per_1m_in": 2.5,
398 "cost_per_1m_out": 15,
399 "cost_per_1m_in_cached": 0,
400 "cost_per_1m_out_cached": 0,
401 "context_window": 1000000,
402 "default_max_tokens": 32768,
403 "can_reason": true,
404 "reasoning_levels": [
405 "low",
406 "medium",
407 "high"
408 ],
409 "default_reasoning_effort": "medium",
410 "supports_attachments": true
411 },
412 {
413 "id": "gemma-4-uncensored",
414 "name": "Gemma 4 Uncensored",
415 "cost_per_1m_in": 0.1625,
416 "cost_per_1m_out": 0.5,
417 "cost_per_1m_in_cached": 0,
418 "cost_per_1m_out_cached": 0,
419 "context_window": 256000,
420 "default_max_tokens": 8192,
421 "can_reason": false,
422 "supports_attachments": true
423 },
424 {
425 "id": "google-gemma-3-27b-it",
426 "name": "Google Gemma 3 27B Instruct",
427 "cost_per_1m_in": 0.12,
428 "cost_per_1m_out": 0.2,
429 "cost_per_1m_in_cached": 0,
430 "cost_per_1m_out_cached": 0,
431 "context_window": 198000,
432 "default_max_tokens": 16384,
433 "can_reason": false,
434 "supports_attachments": true
435 },
436 {
437 "id": "google-gemma-4-26b-a4b-it",
438 "name": "Google Gemma 4 26B A4B Instruct",
439 "cost_per_1m_in": 0.1625,
440 "cost_per_1m_out": 0.5,
441 "cost_per_1m_in_cached": 0,
442 "cost_per_1m_out_cached": 0,
443 "context_window": 256000,
444 "default_max_tokens": 8192,
445 "can_reason": true,
446 "supports_attachments": true
447 },
448 {
449 "id": "google-gemma-4-31b-it",
450 "name": "Google Gemma 4 31B Instruct",
451 "cost_per_1m_in": 0.175,
452 "cost_per_1m_out": 0.5,
453 "cost_per_1m_in_cached": 0,
454 "cost_per_1m_out_cached": 0,
455 "context_window": 256000,
456 "default_max_tokens": 8192,
457 "can_reason": true,
458 "supports_attachments": true
459 },
460 {
461 "id": "grok-41-fast",
462 "name": "Grok 4.1 Fast",
463 "cost_per_1m_in": 0.23,
464 "cost_per_1m_out": 0.57,
465 "cost_per_1m_in_cached": 0,
466 "cost_per_1m_out_cached": 0,
467 "context_window": 1000000,
468 "default_max_tokens": 30000,
469 "can_reason": true,
470 "supports_attachments": true
471 },
472 {
473 "id": "grok-4-20",
474 "name": "Grok 4.20",
475 "cost_per_1m_in": 2.27,
476 "cost_per_1m_out": 6.8,
477 "cost_per_1m_in_cached": 0,
478 "cost_per_1m_out_cached": 0,
479 "context_window": 2000000,
480 "default_max_tokens": 128000,
481 "can_reason": true,
482 "supports_attachments": true
483 },
484 {
485 "id": "kimi-k2-thinking",
486 "name": "Kimi K2 Thinking",
487 "cost_per_1m_in": 0.75,
488 "cost_per_1m_out": 3.2,
489 "cost_per_1m_in_cached": 0,
490 "cost_per_1m_out_cached": 0,
491 "context_window": 256000,
492 "default_max_tokens": 65536,
493 "can_reason": true,
494 "reasoning_levels": [
495 "low",
496 "medium",
497 "high"
498 ],
499 "default_reasoning_effort": "medium",
500 "supports_attachments": false
501 },
502 {
503 "id": "kimi-k2-5",
504 "name": "Kimi K2.5",
505 "cost_per_1m_in": 0.56,
506 "cost_per_1m_out": 3.5,
507 "cost_per_1m_in_cached": 0,
508 "cost_per_1m_out_cached": 0,
509 "context_window": 256000,
510 "default_max_tokens": 65536,
511 "can_reason": true,
512 "reasoning_levels": [
513 "low",
514 "medium",
515 "high"
516 ],
517 "default_reasoning_effort": "medium",
518 "supports_attachments": true
519 },
520 {
521 "id": "kimi-k2-6",
522 "name": "Kimi K2.6",
523 "cost_per_1m_in": 0.7448,
524 "cost_per_1m_out": 4.655,
525 "cost_per_1m_in_cached": 0,
526 "cost_per_1m_out_cached": 0,
527 "context_window": 256000,
528 "default_max_tokens": 65536,
529 "can_reason": true,
530 "reasoning_levels": [
531 "low",
532 "medium",
533 "high"
534 ],
535 "default_reasoning_effort": "medium",
536 "supports_attachments": true
537 },
538 {
539 "id": "llama-3.2-3b",
540 "name": "Llama 3.2 3B",
541 "cost_per_1m_in": 0.15,
542 "cost_per_1m_out": 0.6,
543 "cost_per_1m_in_cached": 0,
544 "cost_per_1m_out_cached": 0,
545 "context_window": 128000,
546 "default_max_tokens": 4096,
547 "can_reason": false,
548 "supports_attachments": false
549 },
550 {
551 "id": "llama-3.3-70b",
552 "name": "Llama 3.3 70B",
553 "cost_per_1m_in": 0.7,
554 "cost_per_1m_out": 2.8,
555 "cost_per_1m_in_cached": 0,
556 "cost_per_1m_out_cached": 0,
557 "context_window": 128000,
558 "default_max_tokens": 4096,
559 "can_reason": false,
560 "supports_attachments": false
561 },
562 {
563 "id": "mercury-2",
564 "name": "Mercury 2",
565 "cost_per_1m_in": 0.3125,
566 "cost_per_1m_out": 0.9375,
567 "cost_per_1m_in_cached": 0,
568 "cost_per_1m_out_cached": 0,
569 "context_window": 128000,
570 "default_max_tokens": 50000,
571 "can_reason": true,
572 "reasoning_levels": [
573 "low",
574 "medium",
575 "high"
576 ],
577 "default_reasoning_effort": "medium",
578 "supports_attachments": false
579 },
580 {
581 "id": "minimax-m25",
582 "name": "MiniMax M2.5",
583 "cost_per_1m_in": 0.34,
584 "cost_per_1m_out": 1.19,
585 "cost_per_1m_in_cached": 0,
586 "cost_per_1m_out_cached": 0,
587 "context_window": 198000,
588 "default_max_tokens": 32768,
589 "can_reason": true,
590 "reasoning_levels": [
591 "low",
592 "medium",
593 "high"
594 ],
595 "default_reasoning_effort": "medium",
596 "supports_attachments": false
597 },
598 {
599 "id": "minimax-m27",
600 "name": "MiniMax M2.7",
601 "cost_per_1m_in": 0.375,
602 "cost_per_1m_out": 1.5,
603 "cost_per_1m_in_cached": 0,
604 "cost_per_1m_out_cached": 0,
605 "context_window": 198000,
606 "default_max_tokens": 32768,
607 "can_reason": true,
608 "reasoning_levels": [
609 "low",
610 "medium",
611 "high"
612 ],
613 "default_reasoning_effort": "medium",
614 "supports_attachments": false
615 },
616 {
617 "id": "mistral-small-3-2-24b-instruct",
618 "name": "Mistral Small 3.2 24B Instruct",
619 "cost_per_1m_in": 0.09375,
620 "cost_per_1m_out": 0.25,
621 "cost_per_1m_in_cached": 0,
622 "cost_per_1m_out_cached": 0,
623 "context_window": 256000,
624 "default_max_tokens": 16384,
625 "can_reason": false,
626 "supports_attachments": false
627 },
628 {
629 "id": "mistral-small-2603",
630 "name": "Mistral Small 4",
631 "cost_per_1m_in": 0.1875,
632 "cost_per_1m_out": 0.75,
633 "cost_per_1m_in_cached": 0,
634 "cost_per_1m_out_cached": 0,
635 "context_window": 256000,
636 "default_max_tokens": 65536,
637 "can_reason": true,
638 "reasoning_levels": [
639 "low",
640 "medium",
641 "high"
642 ],
643 "default_reasoning_effort": "medium",
644 "supports_attachments": true
645 },
646 {
647 "id": "nvidia-nemotron-3-nano-30b-a3b",
648 "name": "NVIDIA Nemotron 3 Nano 30B",
649 "cost_per_1m_in": 0.075,
650 "cost_per_1m_out": 0.3,
651 "cost_per_1m_in_cached": 0,
652 "cost_per_1m_out_cached": 0,
653 "context_window": 128000,
654 "default_max_tokens": 16384,
655 "can_reason": false,
656 "supports_attachments": false
657 },
658 {
659 "id": "nvidia-nemotron-cascade-2-30b-a3b",
660 "name": "NVIDIA Nemotron Cascade 2 30B A3B",
661 "cost_per_1m_in": 0.14,
662 "cost_per_1m_out": 0.8,
663 "cost_per_1m_in_cached": 0,
664 "cost_per_1m_out_cached": 0,
665 "context_window": 256000,
666 "default_max_tokens": 32768,
667 "can_reason": true,
668 "supports_attachments": false
669 },
670 {
671 "id": "openai-gpt-oss-120b",
672 "name": "OpenAI GPT OSS 120B",
673 "cost_per_1m_in": 0.07,
674 "cost_per_1m_out": 0.3,
675 "cost_per_1m_in_cached": 0,
676 "cost_per_1m_out_cached": 0,
677 "context_window": 128000,
678 "default_max_tokens": 16384,
679 "can_reason": false,
680 "supports_attachments": false
681 },
682 {
683 "id": "qwen3-235b-a22b-instruct-2507",
684 "name": "Qwen 3 235B A22B Instruct 2507",
685 "cost_per_1m_in": 0.15,
686 "cost_per_1m_out": 0.75,
687 "cost_per_1m_in_cached": 0,
688 "cost_per_1m_out_cached": 0,
689 "context_window": 128000,
690 "default_max_tokens": 16384,
691 "can_reason": false,
692 "supports_attachments": false
693 },
694 {
695 "id": "qwen3-235b-a22b-thinking-2507",
696 "name": "Qwen 3 235B A22B Thinking 2507",
697 "cost_per_1m_in": 0.45,
698 "cost_per_1m_out": 3.5,
699 "cost_per_1m_in_cached": 0,
700 "cost_per_1m_out_cached": 0,
701 "context_window": 128000,
702 "default_max_tokens": 16384,
703 "can_reason": true,
704 "supports_attachments": false
705 },
706 {
707 "id": "qwen3-coder-480b-a35b-instruct-turbo",
708 "name": "Qwen 3 Coder 480B Turbo",
709 "cost_per_1m_in": 0.35,
710 "cost_per_1m_out": 1.5,
711 "cost_per_1m_in_cached": 0,
712 "cost_per_1m_out_cached": 0,
713 "context_window": 256000,
714 "default_max_tokens": 65536,
715 "can_reason": false,
716 "supports_attachments": false
717 },
718 {
719 "id": "qwen3-coder-480b-a35b-instruct",
720 "name": "Qwen 3 Coder 480B",
721 "cost_per_1m_in": 0.75,
722 "cost_per_1m_out": 3,
723 "cost_per_1m_in_cached": 0,
724 "cost_per_1m_out_cached": 0,
725 "context_window": 256000,
726 "default_max_tokens": 65536,
727 "can_reason": false,
728 "supports_attachments": false
729 },
730 {
731 "id": "qwen3-next-80b",
732 "name": "Qwen 3 Next 80B",
733 "cost_per_1m_in": 0.35,
734 "cost_per_1m_out": 1.9,
735 "cost_per_1m_in_cached": 0,
736 "cost_per_1m_out_cached": 0,
737 "context_window": 256000,
738 "default_max_tokens": 16384,
739 "can_reason": false,
740 "supports_attachments": false
741 },
742 {
743 "id": "qwen3-5-35b-a3b",
744 "name": "Qwen 3.5 35B A3B",
745 "cost_per_1m_in": 0.3125,
746 "cost_per_1m_out": 1.25,
747 "cost_per_1m_in_cached": 0,
748 "cost_per_1m_out_cached": 0,
749 "context_window": 256000,
750 "default_max_tokens": 65536,
751 "can_reason": true,
752 "supports_attachments": true,
753 "options": {
754 "temperature": 1,
755 "top_p": 0.95
756 }
757 },
758 {
759 "id": "qwen3-5-397b-a17b",
760 "name": "Qwen 3.5 397B",
761 "cost_per_1m_in": 0.75,
762 "cost_per_1m_out": 4.5,
763 "cost_per_1m_in_cached": 0,
764 "cost_per_1m_out_cached": 0,
765 "context_window": 128000,
766 "default_max_tokens": 32768,
767 "can_reason": true,
768 "reasoning_levels": [
769 "low",
770 "medium",
771 "high"
772 ],
773 "default_reasoning_effort": "medium",
774 "supports_attachments": true
775 },
776 {
777 "id": "qwen3-5-9b",
778 "name": "Qwen 3.5 9B",
779 "cost_per_1m_in": 0.1,
780 "cost_per_1m_out": 0.15,
781 "cost_per_1m_in_cached": 0,
782 "cost_per_1m_out_cached": 0,
783 "context_window": 256000,
784 "default_max_tokens": 32768,
785 "can_reason": true,
786 "supports_attachments": true
787 },
788 {
789 "id": "qwen-3-6-plus",
790 "name": "Qwen 3.6 Plus Uncensored",
791 "cost_per_1m_in": 0.625,
792 "cost_per_1m_out": 3.75,
793 "cost_per_1m_in_cached": 0,
794 "cost_per_1m_out_cached": 0,
795 "context_window": 1000000,
796 "default_max_tokens": 65536,
797 "can_reason": true,
798 "supports_attachments": true,
799 "options": {
800 "temperature": 0.7,
801 "top_p": 0.8
802 }
803 },
804 {
805 "id": "e2ee-qwen3-30b-a3b-p",
806 "name": "Qwen3 30B A3B",
807 "cost_per_1m_in": 0.19,
808 "cost_per_1m_out": 0.69,
809 "cost_per_1m_in_cached": 0,
810 "cost_per_1m_out_cached": 0,
811 "context_window": 256000,
812 "default_max_tokens": 32768,
813 "can_reason": false,
814 "supports_attachments": false
815 },
816 {
817 "id": "qwen3-vl-235b-a22b",
818 "name": "Qwen3 VL 235B",
819 "cost_per_1m_in": 0.25,
820 "cost_per_1m_out": 1.5,
821 "cost_per_1m_in_cached": 0,
822 "cost_per_1m_out_cached": 0,
823 "context_window": 256000,
824 "default_max_tokens": 16384,
825 "can_reason": false,
826 "supports_attachments": true
827 },
828 {
829 "id": "e2ee-qwen3-vl-30b-a3b-p",
830 "name": "Qwen3 VL 30B A3B",
831 "cost_per_1m_in": 0.25,
832 "cost_per_1m_out": 0.9,
833 "cost_per_1m_in_cached": 0,
834 "cost_per_1m_out_cached": 0,
835 "context_window": 128000,
836 "default_max_tokens": 4096,
837 "can_reason": false,
838 "supports_attachments": true
839 },
840 {
841 "id": "e2ee-qwen3-5-122b-a10b",
842 "name": "Qwen3.5 122B A10B",
843 "cost_per_1m_in": 0.5,
844 "cost_per_1m_out": 4,
845 "cost_per_1m_in_cached": 0,
846 "cost_per_1m_out_cached": 0,
847 "context_window": 128000,
848 "default_max_tokens": 32768,
849 "can_reason": true,
850 "supports_attachments": true
851 },
852 {
853 "id": "arcee-trinity-large-thinking",
854 "name": "Trinity Large Thinking",
855 "cost_per_1m_in": 0.3125,
856 "cost_per_1m_out": 1.125,
857 "cost_per_1m_in_cached": 0,
858 "cost_per_1m_out_cached": 0,
859 "context_window": 256000,
860 "default_max_tokens": 65536,
861 "can_reason": true,
862 "reasoning_levels": [
863 "low",
864 "medium",
865 "high"
866 ],
867 "default_reasoning_effort": "medium",
868 "supports_attachments": false
869 },
870 {
871 "id": "venice-uncensored-role-play",
872 "name": "Venice Role Play Uncensored",
873 "cost_per_1m_in": 0.5,
874 "cost_per_1m_out": 2,
875 "cost_per_1m_in_cached": 0,
876 "cost_per_1m_out_cached": 0,
877 "context_window": 128000,
878 "default_max_tokens": 4096,
879 "can_reason": false,
880 "supports_attachments": true
881 },
882 {
883 "id": "venice-uncensored-1-2",
884 "name": "Venice Uncensored 1.2",
885 "cost_per_1m_in": 0.2,
886 "cost_per_1m_out": 0.9,
887 "cost_per_1m_in_cached": 0,
888 "cost_per_1m_out_cached": 0,
889 "context_window": 128000,
890 "default_max_tokens": 8192,
891 "can_reason": false,
892 "supports_attachments": true
893 }
894 ]
895}