1{
2 "name": "Venice AI",
3 "id": "venice",
4 "api_key": "$VENICE_API_KEY",
5 "api_endpoint": "https://api.venice.ai/api/v1",
6 "type": "openai-compat",
7 "default_large_model_id": "claude-opus-4-6-fast",
8 "default_small_model_id": "deepseek-v4-flash",
9 "models": [
10 {
11 "id": "claude-opus-4-5",
12 "name": "Claude Opus 4.5",
13 "cost_per_1m_in": 6,
14 "cost_per_1m_out": 30,
15 "cost_per_1m_in_cached": 0,
16 "cost_per_1m_out_cached": 0,
17 "context_window": 198000,
18 "default_max_tokens": 32768,
19 "can_reason": true,
20 "supports_attachments": true
21 },
22 {
23 "id": "claude-opus-4-6",
24 "name": "Claude Opus 4.6",
25 "cost_per_1m_in": 6,
26 "cost_per_1m_out": 30,
27 "cost_per_1m_in_cached": 0,
28 "cost_per_1m_out_cached": 0,
29 "context_window": 1000000,
30 "default_max_tokens": 128000,
31 "can_reason": true,
32 "supports_attachments": true
33 },
34 {
35 "id": "claude-opus-4-6-fast",
36 "name": "Claude Opus 4.6 Fast",
37 "cost_per_1m_in": 36,
38 "cost_per_1m_out": 180,
39 "cost_per_1m_in_cached": 0,
40 "cost_per_1m_out_cached": 0,
41 "context_window": 1000000,
42 "default_max_tokens": 128000,
43 "can_reason": true,
44 "supports_attachments": true
45 },
46 {
47 "id": "claude-opus-4-7",
48 "name": "Claude Opus 4.7",
49 "cost_per_1m_in": 6,
50 "cost_per_1m_out": 30,
51 "cost_per_1m_in_cached": 0,
52 "cost_per_1m_out_cached": 0,
53 "context_window": 1000000,
54 "default_max_tokens": 128000,
55 "can_reason": true,
56 "supports_attachments": true
57 },
58 {
59 "id": "claude-sonnet-4-5",
60 "name": "Claude Sonnet 4.5",
61 "cost_per_1m_in": 3.75,
62 "cost_per_1m_out": 18.75,
63 "cost_per_1m_in_cached": 0,
64 "cost_per_1m_out_cached": 0,
65 "context_window": 198000,
66 "default_max_tokens": 64000,
67 "can_reason": true,
68 "supports_attachments": true
69 },
70 {
71 "id": "claude-sonnet-4-6",
72 "name": "Claude Sonnet 4.6",
73 "cost_per_1m_in": 3.6,
74 "cost_per_1m_out": 18,
75 "cost_per_1m_in_cached": 0,
76 "cost_per_1m_out_cached": 0,
77 "context_window": 1000000,
78 "default_max_tokens": 64000,
79 "can_reason": true,
80 "supports_attachments": true
81 },
82 {
83 "id": "deepseek-v3.2",
84 "name": "DeepSeek V3.2",
85 "cost_per_1m_in": 0.33,
86 "cost_per_1m_out": 0.48,
87 "cost_per_1m_in_cached": 0,
88 "cost_per_1m_out_cached": 0,
89 "context_window": 160000,
90 "default_max_tokens": 32768,
91 "can_reason": true,
92 "reasoning_levels": [
93 "low",
94 "medium",
95 "high"
96 ],
97 "default_reasoning_effort": "medium",
98 "supports_attachments": false
99 },
100 {
101 "id": "deepseek-v4-flash",
102 "name": "DeepSeek V4 Flash",
103 "cost_per_1m_in": 0.17,
104 "cost_per_1m_out": 0.35,
105 "cost_per_1m_in_cached": 0,
106 "cost_per_1m_out_cached": 0,
107 "context_window": 1000000,
108 "default_max_tokens": 32768,
109 "can_reason": true,
110 "supports_attachments": false
111 },
112 {
113 "id": "deepseek-v4-pro",
114 "name": "DeepSeek V4 Pro",
115 "cost_per_1m_in": 1.73,
116 "cost_per_1m_out": 3.796,
117 "cost_per_1m_in_cached": 0,
118 "cost_per_1m_out_cached": 0,
119 "context_window": 1000000,
120 "default_max_tokens": 32768,
121 "can_reason": true,
122 "supports_attachments": false
123 },
124 {
125 "id": "zai-org-glm-4.6",
126 "name": "GLM 4.6",
127 "cost_per_1m_in": 0.85,
128 "cost_per_1m_out": 2.75,
129 "cost_per_1m_in_cached": 0,
130 "cost_per_1m_out_cached": 0,
131 "context_window": 198000,
132 "default_max_tokens": 16384,
133 "can_reason": true,
134 "reasoning_levels": [
135 "low",
136 "medium",
137 "high"
138 ],
139 "default_reasoning_effort": "medium",
140 "supports_attachments": false
141 },
142 {
143 "id": "zai-org-glm-4.7",
144 "name": "GLM 4.7",
145 "cost_per_1m_in": 0.55,
146 "cost_per_1m_out": 2.65,
147 "cost_per_1m_in_cached": 0,
148 "cost_per_1m_out_cached": 0,
149 "context_window": 198000,
150 "default_max_tokens": 16384,
151 "can_reason": true,
152 "reasoning_levels": [
153 "low",
154 "medium",
155 "high"
156 ],
157 "default_reasoning_effort": "medium",
158 "supports_attachments": false
159 },
160 {
161 "id": "zai-org-glm-4.7-flash",
162 "name": "GLM 4.7 Flash",
163 "cost_per_1m_in": 0.125,
164 "cost_per_1m_out": 0.5,
165 "cost_per_1m_in_cached": 0,
166 "cost_per_1m_out_cached": 0,
167 "context_window": 128000,
168 "default_max_tokens": 16384,
169 "can_reason": true,
170 "reasoning_levels": [
171 "low",
172 "medium",
173 "high"
174 ],
175 "default_reasoning_effort": "medium",
176 "supports_attachments": false
177 },
178 {
179 "id": "olafangensan-glm-4.7-flash-heretic",
180 "name": "GLM 4.7 Flash Heretic",
181 "cost_per_1m_in": 0.14,
182 "cost_per_1m_out": 0.8,
183 "cost_per_1m_in_cached": 0,
184 "cost_per_1m_out_cached": 0,
185 "context_window": 200000,
186 "default_max_tokens": 24000,
187 "can_reason": true,
188 "reasoning_levels": [
189 "low",
190 "medium",
191 "high"
192 ],
193 "default_reasoning_effort": "medium",
194 "supports_attachments": false
195 },
196 {
197 "id": "zai-org-glm-5",
198 "name": "GLM 5",
199 "cost_per_1m_in": 1,
200 "cost_per_1m_out": 3.2,
201 "cost_per_1m_in_cached": 0,
202 "cost_per_1m_out_cached": 0,
203 "context_window": 198000,
204 "default_max_tokens": 32000,
205 "can_reason": true,
206 "reasoning_levels": [
207 "low",
208 "medium",
209 "high"
210 ],
211 "default_reasoning_effort": "medium",
212 "supports_attachments": false
213 },
214 {
215 "id": "z-ai-glm-5-turbo",
216 "name": "GLM 5 Turbo",
217 "cost_per_1m_in": 1.2,
218 "cost_per_1m_out": 4,
219 "cost_per_1m_in_cached": 0,
220 "cost_per_1m_out_cached": 0,
221 "context_window": 200000,
222 "default_max_tokens": 32768,
223 "can_reason": true,
224 "reasoning_levels": [
225 "low",
226 "medium",
227 "high"
228 ],
229 "default_reasoning_effort": "medium",
230 "supports_attachments": false
231 },
232 {
233 "id": "zai-org-glm-5-1",
234 "name": "GLM 5.1",
235 "cost_per_1m_in": 1.75,
236 "cost_per_1m_out": 5.5,
237 "cost_per_1m_in_cached": 0,
238 "cost_per_1m_out_cached": 0,
239 "context_window": 200000,
240 "default_max_tokens": 24000,
241 "can_reason": true,
242 "reasoning_levels": [
243 "low",
244 "medium",
245 "high"
246 ],
247 "default_reasoning_effort": "medium",
248 "supports_attachments": false
249 },
250 {
251 "id": "z-ai-glm-5v-turbo",
252 "name": "GLM 5V Turbo",
253 "cost_per_1m_in": 1.5,
254 "cost_per_1m_out": 5,
255 "cost_per_1m_in_cached": 0,
256 "cost_per_1m_out_cached": 0,
257 "context_window": 200000,
258 "default_max_tokens": 32768,
259 "can_reason": true,
260 "reasoning_levels": [
261 "low",
262 "medium",
263 "high"
264 ],
265 "default_reasoning_effort": "medium",
266 "supports_attachments": true
267 },
268 {
269 "id": "openai-gpt-4o-2024-11-20",
270 "name": "GPT-4o",
271 "cost_per_1m_in": 3.125,
272 "cost_per_1m_out": 12.5,
273 "cost_per_1m_in_cached": 0,
274 "cost_per_1m_out_cached": 0,
275 "context_window": 128000,
276 "default_max_tokens": 16384,
277 "can_reason": false,
278 "supports_attachments": true
279 },
280 {
281 "id": "openai-gpt-4o-mini-2024-07-18",
282 "name": "GPT-4o Mini",
283 "cost_per_1m_in": 0.1875,
284 "cost_per_1m_out": 0.75,
285 "cost_per_1m_in_cached": 0,
286 "cost_per_1m_out_cached": 0,
287 "context_window": 128000,
288 "default_max_tokens": 16384,
289 "can_reason": false,
290 "supports_attachments": true
291 },
292 {
293 "id": "openai-gpt-52",
294 "name": "GPT-5.2",
295 "cost_per_1m_in": 2.19,
296 "cost_per_1m_out": 17.5,
297 "cost_per_1m_in_cached": 0,
298 "cost_per_1m_out_cached": 0,
299 "context_window": 256000,
300 "default_max_tokens": 65536,
301 "can_reason": true,
302 "reasoning_levels": [
303 "low",
304 "medium",
305 "high"
306 ],
307 "default_reasoning_effort": "medium",
308 "supports_attachments": false
309 },
310 {
311 "id": "openai-gpt-52-codex",
312 "name": "GPT-5.2 Codex",
313 "cost_per_1m_in": 2.19,
314 "cost_per_1m_out": 17.5,
315 "cost_per_1m_in_cached": 0,
316 "cost_per_1m_out_cached": 0,
317 "context_window": 256000,
318 "default_max_tokens": 65536,
319 "can_reason": true,
320 "reasoning_levels": [
321 "low",
322 "medium",
323 "high"
324 ],
325 "default_reasoning_effort": "medium",
326 "supports_attachments": true
327 },
328 {
329 "id": "openai-gpt-53-codex",
330 "name": "GPT-5.3 Codex",
331 "cost_per_1m_in": 2.19,
332 "cost_per_1m_out": 17.5,
333 "cost_per_1m_in_cached": 0,
334 "cost_per_1m_out_cached": 0,
335 "context_window": 400000,
336 "default_max_tokens": 128000,
337 "can_reason": true,
338 "reasoning_levels": [
339 "low",
340 "medium",
341 "high"
342 ],
343 "default_reasoning_effort": "medium",
344 "supports_attachments": true
345 },
346 {
347 "id": "openai-gpt-54",
348 "name": "GPT-5.4",
349 "cost_per_1m_in": 3.13,
350 "cost_per_1m_out": 18.8,
351 "cost_per_1m_in_cached": 0,
352 "cost_per_1m_out_cached": 0,
353 "context_window": 1000000,
354 "default_max_tokens": 131072,
355 "can_reason": true,
356 "reasoning_levels": [
357 "low",
358 "medium",
359 "high"
360 ],
361 "default_reasoning_effort": "medium",
362 "supports_attachments": true
363 },
364 {
365 "id": "openai-gpt-54-mini",
366 "name": "GPT-5.4 Mini",
367 "cost_per_1m_in": 0.9375,
368 "cost_per_1m_out": 5.625,
369 "cost_per_1m_in_cached": 0,
370 "cost_per_1m_out_cached": 0,
371 "context_window": 400000,
372 "default_max_tokens": 128000,
373 "can_reason": true,
374 "reasoning_levels": [
375 "low",
376 "medium",
377 "high"
378 ],
379 "default_reasoning_effort": "medium",
380 "supports_attachments": true
381 },
382 {
383 "id": "openai-gpt-54-pro",
384 "name": "GPT-5.4 Pro",
385 "cost_per_1m_in": 37.5,
386 "cost_per_1m_out": 225,
387 "cost_per_1m_in_cached": 0,
388 "cost_per_1m_out_cached": 0,
389 "context_window": 1000000,
390 "default_max_tokens": 128000,
391 "can_reason": true,
392 "reasoning_levels": [
393 "low",
394 "medium",
395 "high"
396 ],
397 "default_reasoning_effort": "medium",
398 "supports_attachments": true
399 },
400 {
401 "id": "openai-gpt-55",
402 "name": "GPT-5.5",
403 "cost_per_1m_in": 6.25,
404 "cost_per_1m_out": 37.5,
405 "cost_per_1m_in_cached": 0,
406 "cost_per_1m_out_cached": 0,
407 "context_window": 1000000,
408 "default_max_tokens": 131072,
409 "can_reason": true,
410 "reasoning_levels": [
411 "low",
412 "medium",
413 "high"
414 ],
415 "default_reasoning_effort": "medium",
416 "supports_attachments": true
417 },
418 {
419 "id": "openai-gpt-55-pro",
420 "name": "GPT-5.5 Pro",
421 "cost_per_1m_in": 37.5,
422 "cost_per_1m_out": 225,
423 "cost_per_1m_in_cached": 0,
424 "cost_per_1m_out_cached": 0,
425 "context_window": 1000000,
426 "default_max_tokens": 128000,
427 "can_reason": true,
428 "reasoning_levels": [
429 "low",
430 "medium",
431 "high"
432 ],
433 "default_reasoning_effort": "medium",
434 "supports_attachments": true
435 },
436 {
437 "id": "gemini-3-flash-preview",
438 "name": "Gemini 3 Flash Preview",
439 "cost_per_1m_in": 0.7,
440 "cost_per_1m_out": 3.75,
441 "cost_per_1m_in_cached": 0,
442 "cost_per_1m_out_cached": 0,
443 "context_window": 256000,
444 "default_max_tokens": 65536,
445 "can_reason": true,
446 "reasoning_levels": [
447 "low",
448 "medium",
449 "high"
450 ],
451 "default_reasoning_effort": "medium",
452 "supports_attachments": true
453 },
454 {
455 "id": "gemini-3-1-pro-preview",
456 "name": "Gemini 3.1 Pro Preview",
457 "cost_per_1m_in": 2.5,
458 "cost_per_1m_out": 15,
459 "cost_per_1m_in_cached": 0,
460 "cost_per_1m_out_cached": 0,
461 "context_window": 1000000,
462 "default_max_tokens": 32768,
463 "can_reason": true,
464 "reasoning_levels": [
465 "low",
466 "medium",
467 "high"
468 ],
469 "default_reasoning_effort": "medium",
470 "supports_attachments": true
471 },
472 {
473 "id": "gemma-4-uncensored",
474 "name": "Gemma 4 Uncensored",
475 "cost_per_1m_in": 0.1625,
476 "cost_per_1m_out": 0.5,
477 "cost_per_1m_in_cached": 0,
478 "cost_per_1m_out_cached": 0,
479 "context_window": 256000,
480 "default_max_tokens": 8192,
481 "can_reason": false,
482 "supports_attachments": true
483 },
484 {
485 "id": "google-gemma-3-27b-it",
486 "name": "Google Gemma 3 27B Instruct",
487 "cost_per_1m_in": 0.12,
488 "cost_per_1m_out": 0.2,
489 "cost_per_1m_in_cached": 0,
490 "cost_per_1m_out_cached": 0,
491 "context_window": 198000,
492 "default_max_tokens": 16384,
493 "can_reason": false,
494 "supports_attachments": true
495 },
496 {
497 "id": "google-gemma-4-26b-a4b-it",
498 "name": "Google Gemma 4 26B A4B Instruct",
499 "cost_per_1m_in": 0.1625,
500 "cost_per_1m_out": 0.5,
501 "cost_per_1m_in_cached": 0,
502 "cost_per_1m_out_cached": 0,
503 "context_window": 256000,
504 "default_max_tokens": 8192,
505 "can_reason": true,
506 "reasoning_levels": [
507 "low",
508 "medium",
509 "high"
510 ],
511 "default_reasoning_effort": "medium",
512 "supports_attachments": true
513 },
514 {
515 "id": "google-gemma-4-31b-it",
516 "name": "Google Gemma 4 31B Instruct",
517 "cost_per_1m_in": 0.175,
518 "cost_per_1m_out": 0.5,
519 "cost_per_1m_in_cached": 0,
520 "cost_per_1m_out_cached": 0,
521 "context_window": 256000,
522 "default_max_tokens": 8192,
523 "can_reason": true,
524 "reasoning_levels": [
525 "low",
526 "medium",
527 "high"
528 ],
529 "default_reasoning_effort": "medium",
530 "supports_attachments": true
531 },
532 {
533 "id": "grok-41-fast",
534 "name": "Grok 4.1 Fast",
535 "cost_per_1m_in": 0.23,
536 "cost_per_1m_out": 0.57,
537 "cost_per_1m_in_cached": 0,
538 "cost_per_1m_out_cached": 0,
539 "context_window": 1000000,
540 "default_max_tokens": 30000,
541 "can_reason": true,
542 "supports_attachments": true
543 },
544 {
545 "id": "grok-4-20",
546 "name": "Grok 4.20",
547 "cost_per_1m_in": 1.42,
548 "cost_per_1m_out": 2.83,
549 "cost_per_1m_in_cached": 0,
550 "cost_per_1m_out_cached": 0,
551 "context_window": 2000000,
552 "default_max_tokens": 128000,
553 "can_reason": true,
554 "supports_attachments": true
555 },
556 {
557 "id": "grok-4-3",
558 "name": "Grok 4.3",
559 "cost_per_1m_in": 1.42,
560 "cost_per_1m_out": 2.83,
561 "cost_per_1m_in_cached": 0,
562 "cost_per_1m_out_cached": 0,
563 "context_window": 1000000,
564 "default_max_tokens": 32000,
565 "can_reason": true,
566 "supports_attachments": true
567 },
568 {
569 "id": "kimi-k2-5",
570 "name": "Kimi K2.5",
571 "cost_per_1m_in": 0.56,
572 "cost_per_1m_out": 3.5,
573 "cost_per_1m_in_cached": 0,
574 "cost_per_1m_out_cached": 0,
575 "context_window": 256000,
576 "default_max_tokens": 65536,
577 "can_reason": true,
578 "reasoning_levels": [
579 "low",
580 "medium",
581 "high"
582 ],
583 "default_reasoning_effort": "medium",
584 "supports_attachments": true
585 },
586 {
587 "id": "kimi-k2-6",
588 "name": "Kimi K2.6",
589 "cost_per_1m_in": 0.85,
590 "cost_per_1m_out": 4.655,
591 "cost_per_1m_in_cached": 0,
592 "cost_per_1m_out_cached": 0,
593 "context_window": 256000,
594 "default_max_tokens": 65536,
595 "can_reason": true,
596 "reasoning_levels": [
597 "low",
598 "medium",
599 "high"
600 ],
601 "default_reasoning_effort": "medium",
602 "supports_attachments": true
603 },
604 {
605 "id": "llama-3.2-3b",
606 "name": "Llama 3.2 3B",
607 "cost_per_1m_in": 0.15,
608 "cost_per_1m_out": 0.6,
609 "cost_per_1m_in_cached": 0,
610 "cost_per_1m_out_cached": 0,
611 "context_window": 128000,
612 "default_max_tokens": 4096,
613 "can_reason": false,
614 "supports_attachments": false
615 },
616 {
617 "id": "llama-3.3-70b",
618 "name": "Llama 3.3 70B",
619 "cost_per_1m_in": 0.7,
620 "cost_per_1m_out": 2.8,
621 "cost_per_1m_in_cached": 0,
622 "cost_per_1m_out_cached": 0,
623 "context_window": 128000,
624 "default_max_tokens": 4096,
625 "can_reason": false,
626 "supports_attachments": false
627 },
628 {
629 "id": "mercury-2",
630 "name": "Mercury 2",
631 "cost_per_1m_in": 0.3125,
632 "cost_per_1m_out": 0.9375,
633 "cost_per_1m_in_cached": 0,
634 "cost_per_1m_out_cached": 0,
635 "context_window": 128000,
636 "default_max_tokens": 50000,
637 "can_reason": true,
638 "reasoning_levels": [
639 "low",
640 "medium",
641 "high"
642 ],
643 "default_reasoning_effort": "medium",
644 "supports_attachments": false
645 },
646 {
647 "id": "minimax-m25",
648 "name": "MiniMax M2.5",
649 "cost_per_1m_in": 0.34,
650 "cost_per_1m_out": 1.19,
651 "cost_per_1m_in_cached": 0,
652 "cost_per_1m_out_cached": 0,
653 "context_window": 198000,
654 "default_max_tokens": 32768,
655 "can_reason": true,
656 "reasoning_levels": [
657 "low",
658 "medium",
659 "high"
660 ],
661 "default_reasoning_effort": "medium",
662 "supports_attachments": false
663 },
664 {
665 "id": "minimax-m27",
666 "name": "MiniMax M2.7",
667 "cost_per_1m_in": 0.375,
668 "cost_per_1m_out": 1.5,
669 "cost_per_1m_in_cached": 0,
670 "cost_per_1m_out_cached": 0,
671 "context_window": 198000,
672 "default_max_tokens": 32768,
673 "can_reason": true,
674 "reasoning_levels": [
675 "low",
676 "medium",
677 "high"
678 ],
679 "default_reasoning_effort": "medium",
680 "supports_attachments": false
681 },
682 {
683 "id": "mistral-small-3-2-24b-instruct",
684 "name": "Mistral Small 3.2 24B Instruct",
685 "cost_per_1m_in": 0.09375,
686 "cost_per_1m_out": 0.25,
687 "cost_per_1m_in_cached": 0,
688 "cost_per_1m_out_cached": 0,
689 "context_window": 256000,
690 "default_max_tokens": 16384,
691 "can_reason": false,
692 "supports_attachments": false
693 },
694 {
695 "id": "mistral-small-2603",
696 "name": "Mistral Small 4",
697 "cost_per_1m_in": 0.1875,
698 "cost_per_1m_out": 0.75,
699 "cost_per_1m_in_cached": 0,
700 "cost_per_1m_out_cached": 0,
701 "context_window": 256000,
702 "default_max_tokens": 65536,
703 "can_reason": true,
704 "reasoning_levels": [
705 "low",
706 "medium",
707 "high"
708 ],
709 "default_reasoning_effort": "medium",
710 "supports_attachments": true
711 },
712 {
713 "id": "nvidia-nemotron-3-nano-30b-a3b",
714 "name": "NVIDIA Nemotron 3 Nano 30B",
715 "cost_per_1m_in": 0.075,
716 "cost_per_1m_out": 0.3,
717 "cost_per_1m_in_cached": 0,
718 "cost_per_1m_out_cached": 0,
719 "context_window": 128000,
720 "default_max_tokens": 16384,
721 "can_reason": false,
722 "supports_attachments": false
723 },
724 {
725 "id": "nvidia-nemotron-cascade-2-30b-a3b",
726 "name": "Nemotron Cascade 2 30B A3B",
727 "cost_per_1m_in": 0.14,
728 "cost_per_1m_out": 0.8,
729 "cost_per_1m_in_cached": 0,
730 "cost_per_1m_out_cached": 0,
731 "context_window": 256000,
732 "default_max_tokens": 32768,
733 "can_reason": true,
734 "reasoning_levels": [
735 "low",
736 "medium",
737 "high"
738 ],
739 "default_reasoning_effort": "medium",
740 "supports_attachments": false
741 },
742 {
743 "id": "openai-gpt-oss-120b",
744 "name": "OpenAI GPT OSS 120B",
745 "cost_per_1m_in": 0.07,
746 "cost_per_1m_out": 0.3,
747 "cost_per_1m_in_cached": 0,
748 "cost_per_1m_out_cached": 0,
749 "context_window": 128000,
750 "default_max_tokens": 16384,
751 "can_reason": true,
752 "reasoning_levels": [
753 "low",
754 "medium",
755 "high"
756 ],
757 "default_reasoning_effort": "medium",
758 "supports_attachments": false
759 },
760 {
761 "id": "qwen3-235b-a22b-instruct-2507",
762 "name": "Qwen 3 235B A22B Instruct 2507",
763 "cost_per_1m_in": 0.15,
764 "cost_per_1m_out": 0.75,
765 "cost_per_1m_in_cached": 0,
766 "cost_per_1m_out_cached": 0,
767 "context_window": 128000,
768 "default_max_tokens": 16384,
769 "can_reason": false,
770 "supports_attachments": false
771 },
772 {
773 "id": "qwen3-235b-a22b-thinking-2507",
774 "name": "Qwen 3 235B A22B Thinking 2507",
775 "cost_per_1m_in": 0.45,
776 "cost_per_1m_out": 3.5,
777 "cost_per_1m_in_cached": 0,
778 "cost_per_1m_out_cached": 0,
779 "context_window": 128000,
780 "default_max_tokens": 16384,
781 "can_reason": true,
782 "reasoning_levels": [
783 "low",
784 "medium",
785 "high"
786 ],
787 "default_reasoning_effort": "medium",
788 "supports_attachments": false
789 },
790 {
791 "id": "qwen3-coder-480b-a35b-instruct-turbo",
792 "name": "Qwen 3 Coder 480B Turbo",
793 "cost_per_1m_in": 0.35,
794 "cost_per_1m_out": 1.5,
795 "cost_per_1m_in_cached": 0,
796 "cost_per_1m_out_cached": 0,
797 "context_window": 256000,
798 "default_max_tokens": 65536,
799 "can_reason": false,
800 "supports_attachments": false
801 },
802 {
803 "id": "qwen3-next-80b",
804 "name": "Qwen 3 Next 80B",
805 "cost_per_1m_in": 0.35,
806 "cost_per_1m_out": 1.9,
807 "cost_per_1m_in_cached": 0,
808 "cost_per_1m_out_cached": 0,
809 "context_window": 256000,
810 "default_max_tokens": 16384,
811 "can_reason": false,
812 "supports_attachments": false
813 },
814 {
815 "id": "qwen3-5-35b-a3b",
816 "name": "Qwen 3.5 35B A3B",
817 "cost_per_1m_in": 0.3125,
818 "cost_per_1m_out": 1.25,
819 "cost_per_1m_in_cached": 0,
820 "cost_per_1m_out_cached": 0,
821 "context_window": 256000,
822 "default_max_tokens": 65536,
823 "can_reason": true,
824 "reasoning_levels": [
825 "low",
826 "medium",
827 "high"
828 ],
829 "default_reasoning_effort": "medium",
830 "supports_attachments": true,
831 "options": {
832 "temperature": 1,
833 "top_p": 0.95
834 }
835 },
836 {
837 "id": "qwen3-5-397b-a17b",
838 "name": "Qwen 3.5 397B A17B",
839 "cost_per_1m_in": 0.75,
840 "cost_per_1m_out": 4.5,
841 "cost_per_1m_in_cached": 0,
842 "cost_per_1m_out_cached": 0,
843 "context_window": 128000,
844 "default_max_tokens": 32768,
845 "can_reason": true,
846 "reasoning_levels": [
847 "low",
848 "medium",
849 "high"
850 ],
851 "default_reasoning_effort": "medium",
852 "supports_attachments": true
853 },
854 {
855 "id": "qwen3-5-9b",
856 "name": "Qwen 3.5 9B",
857 "cost_per_1m_in": 0.1,
858 "cost_per_1m_out": 0.15,
859 "cost_per_1m_in_cached": 0,
860 "cost_per_1m_out_cached": 0,
861 "context_window": 256000,
862 "default_max_tokens": 32768,
863 "can_reason": true,
864 "reasoning_levels": [
865 "low",
866 "medium",
867 "high"
868 ],
869 "default_reasoning_effort": "medium",
870 "supports_attachments": true
871 },
872 {
873 "id": "qwen3-6-27b",
874 "name": "Qwen 3.6 27B",
875 "cost_per_1m_in": 0.325,
876 "cost_per_1m_out": 3.25,
877 "cost_per_1m_in_cached": 0,
878 "cost_per_1m_out_cached": 0,
879 "context_window": 256000,
880 "default_max_tokens": 65536,
881 "can_reason": true,
882 "reasoning_levels": [
883 "low",
884 "medium",
885 "high"
886 ],
887 "default_reasoning_effort": "medium",
888 "supports_attachments": true,
889 "options": {
890 "temperature": 1,
891 "top_p": 0.95
892 }
893 },
894 {
895 "id": "qwen-3-6-plus",
896 "name": "Qwen 3.6 Plus Uncensored",
897 "cost_per_1m_in": 0.625,
898 "cost_per_1m_out": 3.75,
899 "cost_per_1m_in_cached": 0,
900 "cost_per_1m_out_cached": 0,
901 "context_window": 1000000,
902 "default_max_tokens": 65536,
903 "can_reason": true,
904 "supports_attachments": true,
905 "options": {
906 "temperature": 0.7,
907 "top_p": 0.8
908 }
909 },
910 {
911 "id": "e2ee-qwen3-30b-a3b-p",
912 "name": "Qwen3 30B A3B",
913 "cost_per_1m_in": 0.19,
914 "cost_per_1m_out": 0.69,
915 "cost_per_1m_in_cached": 0,
916 "cost_per_1m_out_cached": 0,
917 "context_window": 256000,
918 "default_max_tokens": 32768,
919 "can_reason": false,
920 "supports_attachments": false
921 },
922 {
923 "id": "qwen3-vl-235b-a22b",
924 "name": "Qwen3 VL 235B",
925 "cost_per_1m_in": 0.25,
926 "cost_per_1m_out": 1.5,
927 "cost_per_1m_in_cached": 0,
928 "cost_per_1m_out_cached": 0,
929 "context_window": 256000,
930 "default_max_tokens": 16384,
931 "can_reason": false,
932 "supports_attachments": true
933 },
934 {
935 "id": "e2ee-qwen3-vl-30b-a3b-p",
936 "name": "Qwen3 VL 30B A3B",
937 "cost_per_1m_in": 0.25,
938 "cost_per_1m_out": 0.9,
939 "cost_per_1m_in_cached": 0,
940 "cost_per_1m_out_cached": 0,
941 "context_window": 128000,
942 "default_max_tokens": 4096,
943 "can_reason": false,
944 "supports_attachments": true
945 },
946 {
947 "id": "e2ee-qwen3-5-122b-a10b",
948 "name": "Qwen3.5 122B A10B",
949 "cost_per_1m_in": 0.5,
950 "cost_per_1m_out": 4,
951 "cost_per_1m_in_cached": 0,
952 "cost_per_1m_out_cached": 0,
953 "context_window": 128000,
954 "default_max_tokens": 32768,
955 "can_reason": true,
956 "supports_attachments": true
957 },
958 {
959 "id": "arcee-trinity-large-thinking",
960 "name": "Trinity Large Thinking",
961 "cost_per_1m_in": 0.3125,
962 "cost_per_1m_out": 1.125,
963 "cost_per_1m_in_cached": 0,
964 "cost_per_1m_out_cached": 0,
965 "context_window": 256000,
966 "default_max_tokens": 65536,
967 "can_reason": true,
968 "reasoning_levels": [
969 "low",
970 "medium",
971 "high"
972 ],
973 "default_reasoning_effort": "medium",
974 "supports_attachments": false
975 },
976 {
977 "id": "venice-uncensored-role-play",
978 "name": "Venice Role Play Uncensored",
979 "cost_per_1m_in": 0.5,
980 "cost_per_1m_out": 2,
981 "cost_per_1m_in_cached": 0,
982 "cost_per_1m_out_cached": 0,
983 "context_window": 128000,
984 "default_max_tokens": 4096,
985 "can_reason": false,
986 "supports_attachments": true
987 },
988 {
989 "id": "venice-uncensored-1-2",
990 "name": "Venice Uncensored 1.2",
991 "cost_per_1m_in": 0.2,
992 "cost_per_1m_out": 0.9,
993 "cost_per_1m_in_cached": 0,
994 "cost_per_1m_out_cached": 0,
995 "context_window": 128000,
996 "default_max_tokens": 8192,
997 "can_reason": false,
998 "supports_attachments": true
999 }
1000 ]
1001}