{
  "name": "Venice AI",
  3  "id": "venice",
  4  "api_key": "$VENICE_API_KEY",
  5  "api_endpoint": "https://api.venice.ai/api/v1",
  6  "type": "openai-compat",
  7  "default_large_model_id": "claude-opus-4-6",
  8  "default_small_model_id": "minimax-m25",
  9  "models": [
 10    {
 11      "id": "claude-opus-4-5",
 12      "name": "Claude Opus 4.5",
 13      "cost_per_1m_in": 6,
 14      "cost_per_1m_out": 30,
 15      "cost_per_1m_in_cached": 0,
 16      "cost_per_1m_out_cached": 0,
 17      "context_window": 198000,
 18      "default_max_tokens": 32768,
 19      "can_reason": true,
 20      "reasoning_levels": [
 21        "low",
 22        "medium",
 23        "high"
 24      ],
 25      "default_reasoning_effort": "medium",
 26      "supports_attachments": true,
 27      "options": {}
 28    },
 29    {
 30      "id": "claude-opus-4-6",
 31      "name": "Claude Opus 4.6",
 32      "cost_per_1m_in": 6,
 33      "cost_per_1m_out": 30,
 34      "cost_per_1m_in_cached": 0,
 35      "cost_per_1m_out_cached": 0,
 36      "context_window": 1000000,
 37      "default_max_tokens": 32768,
 38      "can_reason": true,
 39      "reasoning_levels": [
 40        "low",
 41        "medium",
 42        "high"
 43      ],
 44      "default_reasoning_effort": "medium",
 45      "supports_attachments": true,
 46      "options": {}
 47    },
 48    {
 49      "id": "claude-sonnet-4-5",
 50      "name": "Claude Sonnet 4.5",
 51      "cost_per_1m_in": 3.75,
 52      "cost_per_1m_out": 18.75,
 53      "cost_per_1m_in_cached": 0,
 54      "cost_per_1m_out_cached": 0,
 55      "context_window": 198000,
 56      "default_max_tokens": 32768,
 57      "can_reason": true,
 58      "reasoning_levels": [
 59        "low",
 60        "medium",
 61        "high"
 62      ],
 63      "default_reasoning_effort": "medium",
 64      "supports_attachments": true,
 65      "options": {}
 66    },
 67    {
 68      "id": "claude-sonnet-4-6",
 69      "name": "Claude Sonnet 4.6",
 70      "cost_per_1m_in": 3.6,
 71      "cost_per_1m_out": 18,
 72      "cost_per_1m_in_cached": 0,
 73      "cost_per_1m_out_cached": 0,
 74      "context_window": 1000000,
 75      "default_max_tokens": 32768,
 76      "can_reason": true,
 77      "reasoning_levels": [
 78        "low",
 79        "medium",
 80        "high"
 81      ],
 82      "default_reasoning_effort": "medium",
 83      "supports_attachments": true,
 84      "options": {}
 85    },
 86    {
 87      "id": "zai-org-glm-4.6",
 88      "name": "GLM 4.6",
 89      "cost_per_1m_in": 0.85,
 90      "cost_per_1m_out": 2.75,
 91      "cost_per_1m_in_cached": 0,
 92      "cost_per_1m_out_cached": 0,
 93      "context_window": 198000,
 94      "default_max_tokens": 32768,
 95      "can_reason": false,
 96      "supports_attachments": false,
 97      "options": {}
 98    },
 99    {
100      "id": "zai-org-glm-4.7",
101      "name": "GLM 4.7",
102      "cost_per_1m_in": 0.55,
103      "cost_per_1m_out": 2.65,
104      "cost_per_1m_in_cached": 0,
105      "cost_per_1m_out_cached": 0,
106      "context_window": 198000,
107      "default_max_tokens": 32768,
108      "can_reason": true,
109      "reasoning_levels": [
110        "low",
111        "medium",
112        "high"
113      ],
114      "default_reasoning_effort": "medium",
115      "supports_attachments": false,
116      "options": {}
117    },
118    {
119      "id": "zai-org-glm-4.7-flash",
120      "name": "GLM 4.7 Flash",
121      "cost_per_1m_in": 0.125,
122      "cost_per_1m_out": 0.5,
123      "cost_per_1m_in_cached": 0,
124      "cost_per_1m_out_cached": 0,
125      "context_window": 128000,
126      "default_max_tokens": 32000,
127      "can_reason": true,
128      "reasoning_levels": [
129        "low",
130        "medium",
131        "high"
132      ],
133      "default_reasoning_effort": "medium",
134      "supports_attachments": false,
135      "options": {}
136    },
137    {
138      "id": "olafangensan-glm-4.7-flash-heretic",
139      "name": "GLM 4.7 Flash Heretic",
140      "cost_per_1m_in": 0.14,
141      "cost_per_1m_out": 0.8,
142      "cost_per_1m_in_cached": 0,
143      "cost_per_1m_out_cached": 0,
144      "context_window": 200000,
145      "default_max_tokens": 32768,
146      "can_reason": true,
147      "reasoning_levels": [
148        "low",
149        "medium",
150        "high"
151      ],
152      "default_reasoning_effort": "medium",
153      "supports_attachments": false,
154      "options": {}
155    },
156    {
157      "id": "zai-org-glm-5",
158      "name": "GLM 5",
159      "cost_per_1m_in": 1,
160      "cost_per_1m_out": 3.2,
161      "cost_per_1m_in_cached": 0,
162      "cost_per_1m_out_cached": 0,
163      "context_window": 198000,
164      "default_max_tokens": 32768,
165      "can_reason": true,
166      "reasoning_levels": [
167        "low",
168        "medium",
169        "high"
170      ],
171      "default_reasoning_effort": "medium",
172      "supports_attachments": false,
173      "options": {}
174    },
175    {
176      "id": "openai-gpt-4o-2024-11-20",
177      "name": "GPT-4o",
178      "cost_per_1m_in": 3.125,
179      "cost_per_1m_out": 12.5,
180      "cost_per_1m_in_cached": 0,
181      "cost_per_1m_out_cached": 0,
182      "context_window": 128000,
183      "default_max_tokens": 32000,
184      "can_reason": false,
185      "supports_attachments": true,
186      "options": {}
187    },
188    {
189      "id": "openai-gpt-4o-mini-2024-07-18",
190      "name": "GPT-4o Mini",
191      "cost_per_1m_in": 0.1875,
192      "cost_per_1m_out": 0.75,
193      "cost_per_1m_in_cached": 0,
194      "cost_per_1m_out_cached": 0,
195      "context_window": 128000,
196      "default_max_tokens": 32000,
197      "can_reason": false,
198      "supports_attachments": true,
199      "options": {}
200    },
201    {
202      "id": "openai-gpt-52",
203      "name": "GPT-5.2",
204      "cost_per_1m_in": 2.19,
205      "cost_per_1m_out": 17.5,
206      "cost_per_1m_in_cached": 0,
207      "cost_per_1m_out_cached": 0,
208      "context_window": 256000,
209      "default_max_tokens": 32768,
210      "can_reason": true,
211      "reasoning_levels": [
212        "low",
213        "medium",
214        "high"
215      ],
216      "default_reasoning_effort": "medium",
217      "supports_attachments": false,
218      "options": {}
219    },
220    {
221      "id": "openai-gpt-52-codex",
222      "name": "GPT-5.2 Codex",
223      "cost_per_1m_in": 2.19,
224      "cost_per_1m_out": 17.5,
225      "cost_per_1m_in_cached": 0,
226      "cost_per_1m_out_cached": 0,
227      "context_window": 256000,
228      "default_max_tokens": 32768,
229      "can_reason": true,
230      "reasoning_levels": [
231        "low",
232        "medium",
233        "high"
234      ],
235      "default_reasoning_effort": "medium",
236      "supports_attachments": true,
237      "options": {}
238    },
239    {
240      "id": "openai-gpt-53-codex",
241      "name": "GPT-5.3 Codex",
242      "cost_per_1m_in": 2.19,
243      "cost_per_1m_out": 17.5,
244      "cost_per_1m_in_cached": 0,
245      "cost_per_1m_out_cached": 0,
246      "context_window": 400000,
247      "default_max_tokens": 32768,
248      "can_reason": true,
249      "reasoning_levels": [
250        "low",
251        "medium",
252        "high"
253      ],
254      "default_reasoning_effort": "medium",
255      "supports_attachments": true,
256      "options": {}
257    },
258    {
259      "id": "openai-gpt-54",
260      "name": "GPT-5.4",
261      "cost_per_1m_in": 3.13,
262      "cost_per_1m_out": 18.8,
263      "cost_per_1m_in_cached": 0,
264      "cost_per_1m_out_cached": 0,
265      "context_window": 1000000,
266      "default_max_tokens": 32768,
267      "can_reason": true,
268      "reasoning_levels": [
269        "low",
270        "medium",
271        "high"
272      ],
273      "default_reasoning_effort": "medium",
274      "supports_attachments": true,
275      "options": {}
276    },
277    {
278      "id": "openai-gpt-54-pro",
279      "name": "GPT-5.4 Pro",
280      "cost_per_1m_in": 37.5,
281      "cost_per_1m_out": 225,
282      "cost_per_1m_in_cached": 0,
283      "cost_per_1m_out_cached": 0,
284      "context_window": 1000000,
285      "default_max_tokens": 32768,
286      "can_reason": true,
287      "reasoning_levels": [
288        "low",
289        "medium",
290        "high"
291      ],
292      "default_reasoning_effort": "medium",
293      "supports_attachments": true,
294      "options": {}
295    },
296    {
297      "id": "gemini-3-flash-preview",
298      "name": "Gemini 3 Flash Preview",
299      "cost_per_1m_in": 0.7,
300      "cost_per_1m_out": 3.75,
301      "cost_per_1m_in_cached": 0,
302      "cost_per_1m_out_cached": 0,
303      "context_window": 256000,
304      "default_max_tokens": 32768,
305      "can_reason": true,
306      "reasoning_levels": [
307        "low",
308        "medium",
309        "high"
310      ],
311      "default_reasoning_effort": "medium",
312      "supports_attachments": true,
313      "options": {}
314    },
315    {
316      "id": "gemini-3-pro-preview",
317      "name": "Gemini 3 Pro Preview",
318      "cost_per_1m_in": 2.5,
319      "cost_per_1m_out": 15,
320      "cost_per_1m_in_cached": 0,
321      "cost_per_1m_out_cached": 0,
322      "context_window": 198000,
323      "default_max_tokens": 32768,
324      "can_reason": true,
325      "reasoning_levels": [
326        "low",
327        "medium",
328        "high"
329      ],
330      "default_reasoning_effort": "medium",
331      "supports_attachments": true,
332      "options": {}
333    },
334    {
335      "id": "gemini-3-1-pro-preview",
336      "name": "Gemini 3.1 Pro Preview",
337      "cost_per_1m_in": 2.5,
338      "cost_per_1m_out": 15,
339      "cost_per_1m_in_cached": 0,
340      "cost_per_1m_out_cached": 0,
341      "context_window": 1000000,
342      "default_max_tokens": 32768,
343      "can_reason": true,
344      "reasoning_levels": [
345        "low",
346        "medium",
347        "high"
348      ],
349      "default_reasoning_effort": "medium",
350      "supports_attachments": true,
351      "options": {}
352    },
353    {
354      "id": "google-gemma-3-27b-it",
355      "name": "Google Gemma 3 27B Instruct",
356      "cost_per_1m_in": 0.12,
357      "cost_per_1m_out": 0.2,
358      "cost_per_1m_in_cached": 0,
359      "cost_per_1m_out_cached": 0,
360      "context_window": 198000,
361      "default_max_tokens": 32768,
362      "can_reason": false,
363      "supports_attachments": true,
364      "options": {}
365    },
366    {
367      "id": "grok-41-fast",
368      "name": "Grok 4.1 Fast",
369      "cost_per_1m_in": 0.25,
370      "cost_per_1m_out": 0.625,
371      "cost_per_1m_in_cached": 0,
372      "cost_per_1m_out_cached": 0,
373      "context_window": 1000000,
374      "default_max_tokens": 32768,
375      "can_reason": true,
376      "reasoning_levels": [
377        "low",
378        "medium",
379        "high"
380      ],
381      "default_reasoning_effort": "medium",
382      "supports_attachments": true,
383      "options": {}
384    },
385    {
386      "id": "grok-4-20-beta",
387      "name": "Grok 4.20 Beta",
388      "cost_per_1m_in": 2.5,
389      "cost_per_1m_out": 7.5,
390      "cost_per_1m_in_cached": 0,
391      "cost_per_1m_out_cached": 0,
392      "context_window": 2000000,
393      "default_max_tokens": 32768,
394      "can_reason": true,
395      "reasoning_levels": [
396        "low",
397        "medium",
398        "high"
399      ],
400      "default_reasoning_effort": "medium",
401      "supports_attachments": true,
402      "options": {}
403    },
404    {
405      "id": "grok-code-fast-1",
406      "name": "Grok Code Fast 1",
407      "cost_per_1m_in": 0.25,
408      "cost_per_1m_out": 1.87,
409      "cost_per_1m_in_cached": 0,
410      "cost_per_1m_out_cached": 0,
411      "context_window": 256000,
412      "default_max_tokens": 32768,
413      "can_reason": true,
414      "reasoning_levels": [
415        "low",
416        "medium",
417        "high"
418      ],
419      "default_reasoning_effort": "medium",
420      "supports_attachments": false,
421      "options": {}
422    },
423    {
424      "id": "kimi-k2-thinking",
425      "name": "Kimi K2 Thinking",
426      "cost_per_1m_in": 0.75,
427      "cost_per_1m_out": 3.2,
428      "cost_per_1m_in_cached": 0,
429      "cost_per_1m_out_cached": 0,
430      "context_window": 256000,
431      "default_max_tokens": 32768,
432      "can_reason": true,
433      "reasoning_levels": [
434        "low",
435        "medium",
436        "high"
437      ],
438      "default_reasoning_effort": "medium",
439      "supports_attachments": false,
440      "options": {}
441    },
442    {
443      "id": "kimi-k2-5",
444      "name": "Kimi K2.5",
445      "cost_per_1m_in": 0.56,
446      "cost_per_1m_out": 3.5,
447      "cost_per_1m_in_cached": 0,
448      "cost_per_1m_out_cached": 0,
449      "context_window": 256000,
450      "default_max_tokens": 32768,
451      "can_reason": true,
452      "reasoning_levels": [
453        "low",
454        "medium",
455        "high"
456      ],
457      "default_reasoning_effort": "medium",
458      "supports_attachments": true,
459      "options": {}
460    },
461    {
462      "id": "llama-3.2-3b",
463      "name": "Llama 3.2 3B",
464      "cost_per_1m_in": 0.15,
465      "cost_per_1m_out": 0.6,
466      "cost_per_1m_in_cached": 0,
467      "cost_per_1m_out_cached": 0,
468      "context_window": 128000,
469      "default_max_tokens": 32000,
470      "can_reason": false,
471      "supports_attachments": false,
472      "options": {}
473    },
474    {
475      "id": "llama-3.3-70b",
476      "name": "Llama 3.3 70B",
477      "cost_per_1m_in": 0.7,
478      "cost_per_1m_out": 2.8,
479      "cost_per_1m_in_cached": 0,
480      "cost_per_1m_out_cached": 0,
481      "context_window": 128000,
482      "default_max_tokens": 32000,
483      "can_reason": false,
484      "supports_attachments": false,
485      "options": {}
486    },
487    {
488      "id": "minimax-m21",
489      "name": "MiniMax M2.1",
490      "cost_per_1m_in": 0.35,
491      "cost_per_1m_out": 1.5,
492      "cost_per_1m_in_cached": 0,
493      "cost_per_1m_out_cached": 0,
494      "context_window": 198000,
495      "default_max_tokens": 32768,
496      "can_reason": true,
497      "reasoning_levels": [
498        "low",
499        "medium",
500        "high"
501      ],
502      "default_reasoning_effort": "medium",
503      "supports_attachments": false,
504      "options": {}
505    },
506    {
507      "id": "minimax-m25",
508      "name": "MiniMax M2.5",
509      "cost_per_1m_in": 0.34,
510      "cost_per_1m_out": 1.19,
511      "cost_per_1m_in_cached": 0,
512      "cost_per_1m_out_cached": 0,
513      "context_window": 198000,
514      "default_max_tokens": 32768,
515      "can_reason": true,
516      "reasoning_levels": [
517        "low",
518        "medium",
519        "high"
520      ],
521      "default_reasoning_effort": "medium",
522      "supports_attachments": false,
523      "options": {}
524    },
525    {
526      "id": "mistral-small-3-2-24b-instruct",
527      "name": "Mistral Small 3.2 24B Instruct",
528      "cost_per_1m_in": 0.09375,
529      "cost_per_1m_out": 0.25,
530      "cost_per_1m_in_cached": 0,
531      "cost_per_1m_out_cached": 0,
532      "context_window": 256000,
533      "default_max_tokens": 32768,
534      "can_reason": false,
535      "supports_attachments": false,
536      "options": {}
537    },
538    {
539      "id": "nvidia-nemotron-3-nano-30b-a3b",
540      "name": "NVIDIA Nemotron 3 Nano 30B",
541      "cost_per_1m_in": 0.075,
542      "cost_per_1m_out": 0.3,
543      "cost_per_1m_in_cached": 0,
544      "cost_per_1m_out_cached": 0,
545      "context_window": 128000,
546      "default_max_tokens": 32000,
547      "can_reason": false,
548      "supports_attachments": false,
549      "options": {}
550    },
551    {
552      "id": "openai-gpt-oss-120b",
553      "name": "OpenAI GPT OSS 120B",
554      "cost_per_1m_in": 0.07,
555      "cost_per_1m_out": 0.3,
556      "cost_per_1m_in_cached": 0,
557      "cost_per_1m_out_cached": 0,
558      "context_window": 128000,
559      "default_max_tokens": 32000,
560      "can_reason": false,
561      "supports_attachments": false,
562      "options": {}
563    },
564    {
565      "id": "qwen3-235b-a22b-instruct-2507",
566      "name": "Qwen 3 235B A22B Instruct 2507",
567      "cost_per_1m_in": 0.15,
568      "cost_per_1m_out": 0.75,
569      "cost_per_1m_in_cached": 0,
570      "cost_per_1m_out_cached": 0,
571      "context_window": 128000,
572      "default_max_tokens": 32000,
573      "can_reason": false,
574      "supports_attachments": false,
575      "options": {}
576    },
577    {
578      "id": "qwen3-235b-a22b-thinking-2507",
579      "name": "Qwen 3 235B A22B Thinking 2507",
580      "cost_per_1m_in": 0.45,
581      "cost_per_1m_out": 3.5,
582      "cost_per_1m_in_cached": 0,
583      "cost_per_1m_out_cached": 0,
584      "context_window": 128000,
585      "default_max_tokens": 32000,
586      "can_reason": true,
587      "reasoning_levels": [
588        "low",
589        "medium",
590        "high"
591      ],
592      "default_reasoning_effort": "medium",
593      "supports_attachments": false,
594      "options": {}
595    },
596    {
597      "id": "qwen3-coder-480b-a35b-instruct-turbo",
598      "name": "Qwen 3 Coder 480B Turbo",
599      "cost_per_1m_in": 0.35,
600      "cost_per_1m_out": 1.5,
601      "cost_per_1m_in_cached": 0,
602      "cost_per_1m_out_cached": 0,
603      "context_window": 256000,
604      "default_max_tokens": 32768,
605      "can_reason": false,
606      "supports_attachments": false,
607      "options": {}
608    },
609    {
610      "id": "qwen3-coder-480b-a35b-instruct",
 611      "name": "Qwen 3 Coder 480B",
612      "cost_per_1m_in": 0.75,
613      "cost_per_1m_out": 3,
614      "cost_per_1m_in_cached": 0,
615      "cost_per_1m_out_cached": 0,
616      "context_window": 256000,
617      "default_max_tokens": 32768,
618      "can_reason": false,
619      "supports_attachments": false,
620      "options": {}
621    },
622    {
623      "id": "qwen3-next-80b",
 624      "name": "Qwen 3 Next 80B",
625      "cost_per_1m_in": 0.35,
626      "cost_per_1m_out": 1.9,
627      "cost_per_1m_in_cached": 0,
628      "cost_per_1m_out_cached": 0,
629      "context_window": 256000,
630      "default_max_tokens": 32768,
631      "can_reason": false,
632      "supports_attachments": false,
633      "options": {}
634    },
635    {
636      "id": "qwen3-5-35b-a3b",
637      "name": "Qwen 3.5 35B A3B",
638      "cost_per_1m_in": 0.3125,
639      "cost_per_1m_out": 1.25,
640      "cost_per_1m_in_cached": 0,
641      "cost_per_1m_out_cached": 0,
642      "context_window": 256000,
643      "default_max_tokens": 32768,
644      "can_reason": true,
645      "reasoning_levels": [
646        "low",
647        "medium",
648        "high"
649      ],
650      "default_reasoning_effort": "medium",
651      "supports_attachments": true,
652      "options": {
653        "temperature": 1,
654        "top_p": 0.95
655      }
656    },
657    {
658      "id": "qwen3-5-9b",
659      "name": "Qwen 3.5 9B",
660      "cost_per_1m_in": 0.05,
661      "cost_per_1m_out": 0.15,
662      "cost_per_1m_in_cached": 0,
663      "cost_per_1m_out_cached": 0,
664      "context_window": 256000,
665      "default_max_tokens": 32768,
666      "can_reason": true,
667      "reasoning_levels": [
668        "low",
669        "medium",
670        "high"
671      ],
672      "default_reasoning_effort": "medium",
673      "supports_attachments": true,
674      "options": {}
675    },
676    {
677      "id": "qwen3-vl-235b-a22b",
678      "name": "Qwen3 VL 235B",
679      "cost_per_1m_in": 0.25,
680      "cost_per_1m_out": 1.5,
681      "cost_per_1m_in_cached": 0,
682      "cost_per_1m_out_cached": 0,
683      "context_window": 256000,
684      "default_max_tokens": 32768,
685      "can_reason": false,
686      "supports_attachments": true,
687      "options": {}
688    },
689    {
690      "id": "mistral-31-24b",
691      "name": "Venice Medium",
692      "cost_per_1m_in": 0.5,
693      "cost_per_1m_out": 2,
694      "cost_per_1m_in_cached": 0,
695      "cost_per_1m_out_cached": 0,
696      "context_window": 128000,
697      "default_max_tokens": 32000,
698      "can_reason": false,
699      "supports_attachments": true,
700      "options": {}
701    },
702    {
703      "id": "venice-uncensored-role-play",
704      "name": "Venice Role Play Uncensored",
705      "cost_per_1m_in": 0.5,
706      "cost_per_1m_out": 2,
707      "cost_per_1m_in_cached": 0,
708      "cost_per_1m_out_cached": 0,
709      "context_window": 128000,
710      "default_max_tokens": 32000,
711      "can_reason": false,
712      "supports_attachments": true,
713      "options": {}
714    },
715    {
716      "id": "qwen3-4b",
717      "name": "Venice Small",
718      "cost_per_1m_in": 0.05,
719      "cost_per_1m_out": 0.15,
720      "cost_per_1m_in_cached": 0,
721      "cost_per_1m_out_cached": 0,
722      "context_window": 32000,
723      "default_max_tokens": 8000,
724      "can_reason": true,
725      "reasoning_levels": [
726        "low",
727        "medium",
728        "high"
729      ],
730      "default_reasoning_effort": "medium",
731      "supports_attachments": false,
732      "options": {}
733    }
734  ]
735}