venice.json

  1{
  2  "name": "Venice AI",
  3  "id": "venice",
  4  "api_key": "$VENICE_API_KEY",
  5  "api_endpoint": "https://api.venice.ai/api/v1",
  6  "type": "openai-compat",
  7  "default_large_model_id": "claude-opus-4-6",
  8  "default_small_model_id": "minimax-m25",
  9  "models": [
 10    {
 11      "id": "claude-opus-4-5",
 12      "name": "Claude Opus 4.5",
 13      "cost_per_1m_in": 6,
 14      "cost_per_1m_out": 30,
 15      "cost_per_1m_in_cached": 0,
 16      "cost_per_1m_out_cached": 0,
 17      "context_window": 198000,
 18      "default_max_tokens": 32768,
 19      "can_reason": true,
 20      "reasoning_levels": [
 21        "low",
 22        "medium",
 23        "high"
 24      ],
 25      "default_reasoning_effort": "medium",
 26      "supports_attachments": true,
 27      "options": {}
 28    },
 29    {
 30      "id": "claude-opus-4-6",
 31      "name": "Claude Opus 4.6",
 32      "cost_per_1m_in": 6,
 33      "cost_per_1m_out": 30,
 34      "cost_per_1m_in_cached": 0,
 35      "cost_per_1m_out_cached": 0,
 36      "context_window": 1000000,
 37      "default_max_tokens": 128000,
 38      "can_reason": true,
 39      "reasoning_levels": [
 40        "low",
 41        "medium",
 42        "high"
 43      ],
 44      "default_reasoning_effort": "medium",
 45      "supports_attachments": true,
 46      "options": {}
 47    },
 48    {
 49      "id": "claude-sonnet-4-5",
 50      "name": "Claude Sonnet 4.5",
 51      "cost_per_1m_in": 3.75,
 52      "cost_per_1m_out": 18.75,
 53      "cost_per_1m_in_cached": 0,
 54      "cost_per_1m_out_cached": 0,
 55      "context_window": 198000,
 56      "default_max_tokens": 64000,
 57      "can_reason": true,
 58      "reasoning_levels": [
 59        "low",
 60        "medium",
 61        "high"
 62      ],
 63      "default_reasoning_effort": "medium",
 64      "supports_attachments": true,
 65      "options": {}
 66    },
 67    {
 68      "id": "claude-sonnet-4-6",
 69      "name": "Claude Sonnet 4.6",
 70      "cost_per_1m_in": 3.6,
 71      "cost_per_1m_out": 18,
 72      "cost_per_1m_in_cached": 0,
 73      "cost_per_1m_out_cached": 0,
 74      "context_window": 1000000,
 75      "default_max_tokens": 64000,
 76      "can_reason": true,
 77      "reasoning_levels": [
 78        "low",
 79        "medium",
 80        "high"
 81      ],
 82      "default_reasoning_effort": "medium",
 83      "supports_attachments": true,
 84      "options": {}
 85    },
 86    {
 87      "id": "deepseek-v3.2",
 88      "name": "DeepSeek V3.2",
 89      "cost_per_1m_in": 0.33,
 90      "cost_per_1m_out": 0.48,
 91      "cost_per_1m_in_cached": 0,
 92      "cost_per_1m_out_cached": 0,
 93      "context_window": 160000,
 94      "default_max_tokens": 32768,
 95      "can_reason": true,
 96      "supports_attachments": false,
 97      "options": {}
 98    },
 99    {
100      "id": "zai-org-glm-4.6",
101      "name": "GLM 4.6",
102      "cost_per_1m_in": 0.85,
103      "cost_per_1m_out": 2.75,
104      "cost_per_1m_in_cached": 0,
105      "cost_per_1m_out_cached": 0,
106      "context_window": 198000,
107      "default_max_tokens": 16384,
108      "can_reason": false,
109      "supports_attachments": false,
110      "options": {}
111    },
112    {
113      "id": "zai-org-glm-4.7",
114      "name": "GLM 4.7",
115      "cost_per_1m_in": 0.55,
116      "cost_per_1m_out": 2.65,
117      "cost_per_1m_in_cached": 0,
118      "cost_per_1m_out_cached": 0,
119      "context_window": 198000,
120      "default_max_tokens": 16384,
121      "can_reason": true,
122      "supports_attachments": false,
123      "options": {}
124    },
125    {
126      "id": "zai-org-glm-4.7-flash",
127      "name": "GLM 4.7 Flash",
128      "cost_per_1m_in": 0.125,
129      "cost_per_1m_out": 0.5,
130      "cost_per_1m_in_cached": 0,
131      "cost_per_1m_out_cached": 0,
132      "context_window": 128000,
133      "default_max_tokens": 16384,
134      "can_reason": true,
135      "reasoning_levels": [
136        "low",
137        "medium",
138        "high"
139      ],
140      "default_reasoning_effort": "medium",
141      "supports_attachments": false,
142      "options": {}
143    },
144    {
145      "id": "olafangensan-glm-4.7-flash-heretic",
146      "name": "GLM 4.7 Flash Heretic",
147      "cost_per_1m_in": 0.14,
148      "cost_per_1m_out": 0.8,
149      "cost_per_1m_in_cached": 0,
150      "cost_per_1m_out_cached": 0,
151      "context_window": 200000,
152      "default_max_tokens": 24000,
153      "can_reason": true,
154      "supports_attachments": false,
155      "options": {}
156    },
157    {
158      "id": "zai-org-glm-5",
159      "name": "GLM 5",
160      "cost_per_1m_in": 1,
161      "cost_per_1m_out": 3.2,
162      "cost_per_1m_in_cached": 0,
163      "cost_per_1m_out_cached": 0,
164      "context_window": 198000,
165      "default_max_tokens": 32000,
166      "can_reason": true,
167      "supports_attachments": false,
168      "options": {}
169    },
170    {
171      "id": "openai-gpt-4o-2024-11-20",
172      "name": "GPT-4o",
173      "cost_per_1m_in": 3.125,
174      "cost_per_1m_out": 12.5,
175      "cost_per_1m_in_cached": 0,
176      "cost_per_1m_out_cached": 0,
177      "context_window": 128000,
178      "default_max_tokens": 16384,
179      "can_reason": false,
180      "supports_attachments": true,
181      "options": {}
182    },
183    {
184      "id": "openai-gpt-4o-mini-2024-07-18",
185      "name": "GPT-4o Mini",
186      "cost_per_1m_in": 0.1875,
187      "cost_per_1m_out": 0.75,
188      "cost_per_1m_in_cached": 0,
189      "cost_per_1m_out_cached": 0,
190      "context_window": 128000,
191      "default_max_tokens": 16384,
192      "can_reason": false,
193      "supports_attachments": true,
194      "options": {}
195    },
196    {
197      "id": "openai-gpt-52",
198      "name": "GPT-5.2",
199      "cost_per_1m_in": 2.19,
200      "cost_per_1m_out": 17.5,
201      "cost_per_1m_in_cached": 0,
202      "cost_per_1m_out_cached": 0,
203      "context_window": 256000,
204      "default_max_tokens": 65536,
205      "can_reason": true,
206      "reasoning_levels": [
207        "low",
208        "medium",
209        "high"
210      ],
211      "default_reasoning_effort": "medium",
212      "supports_attachments": false,
213      "options": {}
214    },
215    {
216      "id": "openai-gpt-52-codex",
217      "name": "GPT-5.2 Codex",
218      "cost_per_1m_in": 2.19,
219      "cost_per_1m_out": 17.5,
220      "cost_per_1m_in_cached": 0,
221      "cost_per_1m_out_cached": 0,
222      "context_window": 256000,
223      "default_max_tokens": 65536,
224      "can_reason": true,
225      "reasoning_levels": [
226        "low",
227        "medium",
228        "high"
229      ],
230      "default_reasoning_effort": "medium",
231      "supports_attachments": true,
232      "options": {}
233    },
234    {
235      "id": "openai-gpt-53-codex",
236      "name": "GPT-5.3 Codex",
237      "cost_per_1m_in": 2.19,
238      "cost_per_1m_out": 17.5,
239      "cost_per_1m_in_cached": 0,
240      "cost_per_1m_out_cached": 0,
241      "context_window": 400000,
242      "default_max_tokens": 128000,
243      "can_reason": true,
244      "reasoning_levels": [
245        "low",
246        "medium",
247        "high"
248      ],
249      "default_reasoning_effort": "medium",
250      "supports_attachments": true,
251      "options": {}
252    },
253    {
254      "id": "openai-gpt-54",
255      "name": "GPT-5.4",
256      "cost_per_1m_in": 3.13,
257      "cost_per_1m_out": 18.8,
258      "cost_per_1m_in_cached": 0,
259      "cost_per_1m_out_cached": 0,
260      "context_window": 1000000,
261      "default_max_tokens": 131072,
262      "can_reason": true,
263      "reasoning_levels": [
264        "low",
265        "medium",
266        "high"
267      ],
268      "default_reasoning_effort": "medium",
269      "supports_attachments": true,
270      "options": {}
271    },
272    {
273      "id": "openai-gpt-54-mini",
274      "name": "GPT-5.4 Mini",
275      "cost_per_1m_in": 0.9375,
276      "cost_per_1m_out": 5.625,
277      "cost_per_1m_in_cached": 0,
278      "cost_per_1m_out_cached": 0,
279      "context_window": 400000,
280      "default_max_tokens": 128000,
281      "can_reason": true,
282      "reasoning_levels": [
283        "low",
284        "medium",
285        "high"
286      ],
287      "default_reasoning_effort": "medium",
288      "supports_attachments": true,
289      "options": {}
290    },
291    {
292      "id": "openai-gpt-54-pro",
293      "name": "GPT-5.4 Pro",
294      "cost_per_1m_in": 37.5,
295      "cost_per_1m_out": 225,
296      "cost_per_1m_in_cached": 0,
297      "cost_per_1m_out_cached": 0,
298      "context_window": 1000000,
299      "default_max_tokens": 128000,
300      "can_reason": true,
301      "reasoning_levels": [
302        "low",
303        "medium",
304        "high"
305      ],
306      "default_reasoning_effort": "medium",
307      "supports_attachments": true,
308      "options": {}
309    },
310    {
311      "id": "gemini-3-flash-preview",
312      "name": "Gemini 3 Flash Preview",
313      "cost_per_1m_in": 0.7,
314      "cost_per_1m_out": 3.75,
315      "cost_per_1m_in_cached": 0,
316      "cost_per_1m_out_cached": 0,
317      "context_window": 256000,
318      "default_max_tokens": 65536,
319      "can_reason": true,
320      "reasoning_levels": [
321        "low",
322        "medium",
323        "high"
324      ],
325      "default_reasoning_effort": "medium",
326      "supports_attachments": true,
327      "options": {}
328    },
329    {
330      "id": "gemini-3-1-pro-preview",
331      "name": "Gemini 3.1 Pro Preview",
332      "cost_per_1m_in": 2.5,
333      "cost_per_1m_out": 15,
334      "cost_per_1m_in_cached": 0,
335      "cost_per_1m_out_cached": 0,
336      "context_window": 1000000,
337      "default_max_tokens": 32768,
338      "can_reason": true,
339      "reasoning_levels": [
340        "low",
341        "medium",
342        "high"
343      ],
344      "default_reasoning_effort": "medium",
345      "supports_attachments": true,
346      "options": {}
347    },
348    {
349      "id": "google-gemma-3-27b-it",
350      "name": "Google Gemma 3 27B Instruct",
351      "cost_per_1m_in": 0.12,
352      "cost_per_1m_out": 0.2,
353      "cost_per_1m_in_cached": 0,
354      "cost_per_1m_out_cached": 0,
355      "context_window": 198000,
356      "default_max_tokens": 16384,
357      "can_reason": false,
358      "supports_attachments": true,
359      "options": {}
360    },
361    {
362      "id": "grok-41-fast",
363      "name": "Grok 4.1 Fast",
364      "cost_per_1m_in": 0.25,
365      "cost_per_1m_out": 0.625,
366      "cost_per_1m_in_cached": 0,
367      "cost_per_1m_out_cached": 0,
368      "context_window": 1000000,
369      "default_max_tokens": 30000,
370      "can_reason": true,
371      "reasoning_levels": [
372        "low",
373        "medium",
374        "high"
375      ],
376      "default_reasoning_effort": "medium",
377      "supports_attachments": true,
378      "options": {}
379    },
380    {
381      "id": "grok-4-20-beta",
382      "name": "Grok 4.20 Beta",
383      "cost_per_1m_in": 2.5,
384      "cost_per_1m_out": 7.5,
385      "cost_per_1m_in_cached": 0,
386      "cost_per_1m_out_cached": 0,
387      "context_window": 2000000,
388      "default_max_tokens": 128000,
389      "can_reason": true,
390      "reasoning_levels": [
391        "low",
392        "medium",
393        "high"
394      ],
395      "default_reasoning_effort": "medium",
396      "supports_attachments": true,
397      "options": {}
398    },
399    {
400      "id": "grok-code-fast-1",
401      "name": "Grok Code Fast 1",
402      "cost_per_1m_in": 0.25,
403      "cost_per_1m_out": 1.87,
404      "cost_per_1m_in_cached": 0,
405      "cost_per_1m_out_cached": 0,
406      "context_window": 256000,
407      "default_max_tokens": 10000,
408      "can_reason": true,
409      "reasoning_levels": [
410        "low",
411        "medium",
412        "high"
413      ],
414      "default_reasoning_effort": "medium",
415      "supports_attachments": false,
416      "options": {}
417    },
418    {
419      "id": "kimi-k2-thinking",
420      "name": "Kimi K2 Thinking",
421      "cost_per_1m_in": 0.75,
422      "cost_per_1m_out": 3.2,
423      "cost_per_1m_in_cached": 0,
424      "cost_per_1m_out_cached": 0,
425      "context_window": 256000,
426      "default_max_tokens": 65536,
427      "can_reason": true,
428      "reasoning_levels": [
429        "low",
430        "medium",
431        "high"
432      ],
433      "default_reasoning_effort": "medium",
434      "supports_attachments": false,
435      "options": {}
436    },
437    {
438      "id": "kimi-k2-5",
439      "name": "Kimi K2.5",
440      "cost_per_1m_in": 0.56,
441      "cost_per_1m_out": 3.5,
442      "cost_per_1m_in_cached": 0,
443      "cost_per_1m_out_cached": 0,
444      "context_window": 256000,
445      "default_max_tokens": 65536,
446      "can_reason": true,
447      "reasoning_levels": [
448        "low",
449        "medium",
450        "high"
451      ],
452      "default_reasoning_effort": "medium",
453      "supports_attachments": true,
454      "options": {}
455    },
456    {
457      "id": "llama-3.2-3b",
458      "name": "Llama 3.2 3B",
459      "cost_per_1m_in": 0.15,
460      "cost_per_1m_out": 0.6,
461      "cost_per_1m_in_cached": 0,
462      "cost_per_1m_out_cached": 0,
463      "context_window": 128000,
464      "default_max_tokens": 4096,
465      "can_reason": false,
466      "supports_attachments": false,
467      "options": {}
468    },
469    {
470      "id": "llama-3.3-70b",
471      "name": "Llama 3.3 70B",
472      "cost_per_1m_in": 0.7,
473      "cost_per_1m_out": 2.8,
474      "cost_per_1m_in_cached": 0,
475      "cost_per_1m_out_cached": 0,
476      "context_window": 128000,
477      "default_max_tokens": 4096,
478      "can_reason": false,
479      "supports_attachments": false,
480      "options": {}
481    },
482    {
483      "id": "minimax-m21",
484      "name": "MiniMax M2.1",
485      "cost_per_1m_in": 0.35,
486      "cost_per_1m_out": 1.5,
487      "cost_per_1m_in_cached": 0,
488      "cost_per_1m_out_cached": 0,
489      "context_window": 198000,
490      "default_max_tokens": 32768,
491      "can_reason": true,
492      "reasoning_levels": [
493        "low",
494        "medium",
495        "high"
496      ],
497      "default_reasoning_effort": "medium",
498      "supports_attachments": false,
499      "options": {}
500    },
501    {
502      "id": "minimax-m25",
503      "name": "MiniMax M2.5",
504      "cost_per_1m_in": 0.34,
505      "cost_per_1m_out": 1.19,
506      "cost_per_1m_in_cached": 0,
507      "cost_per_1m_out_cached": 0,
508      "context_window": 198000,
509      "default_max_tokens": 32768,
510      "can_reason": true,
511      "reasoning_levels": [
512        "low",
513        "medium",
514        "high"
515      ],
516      "default_reasoning_effort": "medium",
517      "supports_attachments": false,
518      "options": {}
519    },
520    {
521      "id": "minimax-m27",
522      "name": "MiniMax M2.7",
523      "cost_per_1m_in": 0.375,
524      "cost_per_1m_out": 1.5,
525      "cost_per_1m_in_cached": 0,
526      "cost_per_1m_out_cached": 0,
527      "context_window": 198000,
528      "default_max_tokens": 32768,
529      "can_reason": true,
530      "reasoning_levels": [
531        "low",
532        "medium",
533        "high"
534      ],
535      "default_reasoning_effort": "medium",
536      "supports_attachments": false,
537      "options": {}
538    },
539    {
540      "id": "mistral-small-3-2-24b-instruct",
541      "name": "Mistral Small 3.2 24B Instruct",
542      "cost_per_1m_in": 0.09375,
543      "cost_per_1m_out": 0.25,
544      "cost_per_1m_in_cached": 0,
545      "cost_per_1m_out_cached": 0,
546      "context_window": 256000,
547      "default_max_tokens": 16384,
548      "can_reason": false,
549      "supports_attachments": false,
550      "options": {}
551    },
552    {
553      "id": "nvidia-nemotron-3-nano-30b-a3b",
554      "name": "NVIDIA Nemotron 3 Nano 30B",
555      "cost_per_1m_in": 0.075,
556      "cost_per_1m_out": 0.3,
557      "cost_per_1m_in_cached": 0,
558      "cost_per_1m_out_cached": 0,
559      "context_window": 128000,
560      "default_max_tokens": 16384,
561      "can_reason": false,
562      "supports_attachments": false,
563      "options": {}
564    },
565    {
566      "id": "openai-gpt-oss-120b",
567      "name": "OpenAI GPT OSS 120B",
568      "cost_per_1m_in": 0.07,
569      "cost_per_1m_out": 0.3,
570      "cost_per_1m_in_cached": 0,
571      "cost_per_1m_out_cached": 0,
572      "context_window": 128000,
573      "default_max_tokens": 16384,
574      "can_reason": false,
575      "supports_attachments": false,
576      "options": {}
577    },
578    {
579      "id": "qwen3-235b-a22b-instruct-2507",
580      "name": "Qwen 3 235B A22B Instruct 2507",
581      "cost_per_1m_in": 0.15,
582      "cost_per_1m_out": 0.75,
583      "cost_per_1m_in_cached": 0,
584      "cost_per_1m_out_cached": 0,
585      "context_window": 128000,
586      "default_max_tokens": 16384,
587      "can_reason": false,
588      "supports_attachments": false,
589      "options": {}
590    },
591    {
592      "id": "qwen3-235b-a22b-thinking-2507",
593      "name": "Qwen 3 235B A22B Thinking 2507",
594      "cost_per_1m_in": 0.45,
595      "cost_per_1m_out": 3.5,
596      "cost_per_1m_in_cached": 0,
597      "cost_per_1m_out_cached": 0,
598      "context_window": 128000,
599      "default_max_tokens": 16384,
600      "can_reason": true,
601      "supports_attachments": false,
602      "options": {}
603    },
604    {
605      "id": "qwen3-coder-480b-a35b-instruct-turbo",
606      "name": "Qwen 3 Coder 480B Turbo",
607      "cost_per_1m_in": 0.35,
608      "cost_per_1m_out": 1.5,
609      "cost_per_1m_in_cached": 0,
610      "cost_per_1m_out_cached": 0,
611      "context_window": 256000,
612      "default_max_tokens": 65536,
613      "can_reason": false,
614      "supports_attachments": false,
615      "options": {}
616    },
617    {
618      "id": "qwen3-coder-480b-a35b-instruct",
 619      "name": "Qwen 3 Coder 480B",
620      "cost_per_1m_in": 0.75,
621      "cost_per_1m_out": 3,
622      "cost_per_1m_in_cached": 0,
623      "cost_per_1m_out_cached": 0,
624      "context_window": 256000,
625      "default_max_tokens": 65536,
626      "can_reason": false,
627      "supports_attachments": false,
628      "options": {}
629    },
630    {
631      "id": "qwen3-next-80b",
 632      "name": "Qwen 3 Next 80B",
633      "cost_per_1m_in": 0.35,
634      "cost_per_1m_out": 1.9,
635      "cost_per_1m_in_cached": 0,
636      "cost_per_1m_out_cached": 0,
637      "context_window": 256000,
638      "default_max_tokens": 16384,
639      "can_reason": false,
640      "supports_attachments": false,
641      "options": {}
642    },
643    {
644      "id": "qwen3-5-35b-a3b",
645      "name": "Qwen 3.5 35B A3B",
646      "cost_per_1m_in": 0.3125,
647      "cost_per_1m_out": 1.25,
648      "cost_per_1m_in_cached": 0,
649      "cost_per_1m_out_cached": 0,
650      "context_window": 256000,
651      "default_max_tokens": 65536,
652      "can_reason": true,
653      "supports_attachments": true,
654      "options": {
655        "temperature": 1,
656        "top_p": 0.95
657      }
658    },
659    {
660      "id": "qwen3-5-9b",
661      "name": "Qwen 3.5 9B",
662      "cost_per_1m_in": 0.05,
663      "cost_per_1m_out": 0.15,
664      "cost_per_1m_in_cached": 0,
665      "cost_per_1m_out_cached": 0,
666      "context_window": 256000,
667      "default_max_tokens": 65536,
668      "can_reason": true,
669      "supports_attachments": true,
670      "options": {}
671    },
672    {
673      "id": "qwen3-vl-235b-a22b",
 674      "name": "Qwen 3 VL 235B",
675      "cost_per_1m_in": 0.25,
676      "cost_per_1m_out": 1.5,
677      "cost_per_1m_in_cached": 0,
678      "cost_per_1m_out_cached": 0,
679      "context_window": 256000,
680      "default_max_tokens": 16384,
681      "can_reason": false,
682      "supports_attachments": true,
683      "options": {}
684    },
685    {
686      "id": "venice-uncensored-role-play",
687      "name": "Venice Role Play Uncensored",
688      "cost_per_1m_in": 0.5,
689      "cost_per_1m_out": 2,
690      "cost_per_1m_in_cached": 0,
691      "cost_per_1m_out_cached": 0,
692      "context_window": 128000,
693      "default_max_tokens": 4096,
694      "can_reason": false,
695      "supports_attachments": true,
696      "options": {}
697    }
698  ]
699}