venice.json

  1{
  2  "name": "Venice AI",
  3  "id": "venice",
  4  "api_key": "$VENICE_API_KEY",
  5  "api_endpoint": "https://api.venice.ai/api/v1",
  6  "type": "openai-compat",
  7  "default_large_model_id": "claude-opus-4-6-fast",
  8  "default_small_model_id": "deepseek-v4-flash",
  9  "models": [
 10    {
 11      "id": "claude-opus-4-5",
 12      "name": "Claude Opus 4.5",
 13      "cost_per_1m_in": 6,
 14      "cost_per_1m_out": 30,
 15      "cost_per_1m_in_cached": 0,
 16      "cost_per_1m_out_cached": 0,
 17      "context_window": 198000,
 18      "default_max_tokens": 32768,
 19      "can_reason": true,
 20      "reasoning_levels": [
 21        "low",
 22        "medium",
 23        "high"
 24      ],
 25      "default_reasoning_effort": "medium",
 26      "supports_attachments": true
 27    },
 28    {
 29      "id": "claude-opus-4-6",
 30      "name": "Claude Opus 4.6",
 31      "cost_per_1m_in": 6,
 32      "cost_per_1m_out": 30,
 33      "cost_per_1m_in_cached": 0,
 34      "cost_per_1m_out_cached": 0,
 35      "context_window": 1000000,
 36      "default_max_tokens": 128000,
 37      "can_reason": true,
 38      "reasoning_levels": [
 39        "low",
 40        "medium",
 41        "high"
 42      ],
 43      "default_reasoning_effort": "medium",
 44      "supports_attachments": true
 45    },
 46    {
 47      "id": "claude-opus-4-6-fast",
 48      "name": "Claude Opus 4.6 Fast",
 49      "cost_per_1m_in": 36,
 50      "cost_per_1m_out": 180,
 51      "cost_per_1m_in_cached": 0,
 52      "cost_per_1m_out_cached": 0,
 53      "context_window": 1000000,
 54      "default_max_tokens": 128000,
 55      "can_reason": true,
 56      "reasoning_levels": [
 57        "low",
 58        "medium",
 59        "high"
 60      ],
 61      "default_reasoning_effort": "medium",
 62      "supports_attachments": true
 63    },
 64    {
 65      "id": "claude-opus-4-7",
 66      "name": "Claude Opus 4.7",
 67      "cost_per_1m_in": 6,
 68      "cost_per_1m_out": 30,
 69      "cost_per_1m_in_cached": 0,
 70      "cost_per_1m_out_cached": 0,
 71      "context_window": 1000000,
 72      "default_max_tokens": 128000,
 73      "can_reason": true,
 74      "reasoning_levels": [
 75        "low",
 76        "medium",
 77        "high"
 78      ],
 79      "default_reasoning_effort": "medium",
 80      "supports_attachments": true
 81    },
 82    {
 83      "id": "claude-sonnet-4-5",
 84      "name": "Claude Sonnet 4.5",
 85      "cost_per_1m_in": 3.75,
 86      "cost_per_1m_out": 18.75,
 87      "cost_per_1m_in_cached": 0,
 88      "cost_per_1m_out_cached": 0,
 89      "context_window": 198000,
 90      "default_max_tokens": 64000,
 91      "can_reason": true,
 92      "reasoning_levels": [
 93        "low",
 94        "medium",
 95        "high"
 96      ],
 97      "default_reasoning_effort": "medium",
 98      "supports_attachments": true
 99    },
100    {
101      "id": "claude-sonnet-4-6",
102      "name": "Claude Sonnet 4.6",
103      "cost_per_1m_in": 3.6,
104      "cost_per_1m_out": 18,
105      "cost_per_1m_in_cached": 0,
106      "cost_per_1m_out_cached": 0,
107      "context_window": 1000000,
108      "default_max_tokens": 64000,
109      "can_reason": true,
110      "reasoning_levels": [
111        "low",
112        "medium",
113        "high"
114      ],
115      "default_reasoning_effort": "medium",
116      "supports_attachments": true
117    },
118    {
119      "id": "deepseek-v3.2",
120      "name": "DeepSeek V3.2",
121      "cost_per_1m_in": 0.33,
122      "cost_per_1m_out": 0.48,
123      "cost_per_1m_in_cached": 0,
124      "cost_per_1m_out_cached": 0,
125      "context_window": 160000,
126      "default_max_tokens": 32768,
127      "can_reason": true,
128      "supports_attachments": false
129    },
130    {
131      "id": "deepseek-v4-flash",
132      "name": "DeepSeek V4 Flash",
133      "cost_per_1m_in": 0.175,
134      "cost_per_1m_out": 0.35,
135      "cost_per_1m_in_cached": 0,
136      "cost_per_1m_out_cached": 0,
137      "context_window": 1000000,
138      "default_max_tokens": 32768,
139      "can_reason": true,
140      "reasoning_levels": [
141        "low",
142        "medium",
143        "high"
144      ],
145      "default_reasoning_effort": "medium",
146      "supports_attachments": false
147    },
148    {
149      "id": "deepseek-v4-pro",
150      "name": "DeepSeek V4 Pro",
151      "cost_per_1m_in": 2.175,
152      "cost_per_1m_out": 4.35,
153      "cost_per_1m_in_cached": 0,
154      "cost_per_1m_out_cached": 0,
155      "context_window": 1000000,
156      "default_max_tokens": 32768,
157      "can_reason": true,
158      "reasoning_levels": [
159        "low",
160        "medium",
161        "high"
162      ],
163      "default_reasoning_effort": "medium",
164      "supports_attachments": false
165    },
166    {
167      "id": "zai-org-glm-4.6",
168      "name": "GLM 4.6",
169      "cost_per_1m_in": 0.85,
170      "cost_per_1m_out": 2.75,
171      "cost_per_1m_in_cached": 0,
172      "cost_per_1m_out_cached": 0,
173      "context_window": 198000,
174      "default_max_tokens": 16384,
175      "can_reason": true,
176      "supports_attachments": false
177    },
178    {
179      "id": "zai-org-glm-4.7",
180      "name": "GLM 4.7",
181      "cost_per_1m_in": 0.55,
182      "cost_per_1m_out": 2.65,
183      "cost_per_1m_in_cached": 0,
184      "cost_per_1m_out_cached": 0,
185      "context_window": 198000,
186      "default_max_tokens": 16384,
187      "can_reason": true,
188      "supports_attachments": false
189    },
190    {
191      "id": "zai-org-glm-4.7-flash",
192      "name": "GLM 4.7 Flash",
193      "cost_per_1m_in": 0.125,
194      "cost_per_1m_out": 0.5,
195      "cost_per_1m_in_cached": 0,
196      "cost_per_1m_out_cached": 0,
197      "context_window": 128000,
198      "default_max_tokens": 16384,
199      "can_reason": true,
200      "reasoning_levels": [
201        "low",
202        "medium",
203        "high"
204      ],
205      "default_reasoning_effort": "medium",
206      "supports_attachments": false
207    },
208    {
209      "id": "olafangensan-glm-4.7-flash-heretic",
210      "name": "GLM 4.7 Flash Heretic",
211      "cost_per_1m_in": 0.14,
212      "cost_per_1m_out": 0.8,
213      "cost_per_1m_in_cached": 0,
214      "cost_per_1m_out_cached": 0,
215      "context_window": 200000,
216      "default_max_tokens": 24000,
217      "can_reason": true,
218      "supports_attachments": false
219    },
220    {
221      "id": "zai-org-glm-5",
222      "name": "GLM 5",
223      "cost_per_1m_in": 1,
224      "cost_per_1m_out": 3.2,
225      "cost_per_1m_in_cached": 0,
226      "cost_per_1m_out_cached": 0,
227      "context_window": 198000,
228      "default_max_tokens": 32000,
229      "can_reason": true,
230      "supports_attachments": false
231    },
232    {
233      "id": "z-ai-glm-5-turbo",
234      "name": "GLM 5 Turbo",
235      "cost_per_1m_in": 1.2,
236      "cost_per_1m_out": 4,
237      "cost_per_1m_in_cached": 0,
238      "cost_per_1m_out_cached": 0,
239      "context_window": 200000,
240      "default_max_tokens": 32768,
241      "can_reason": true,
242      "reasoning_levels": [
243        "low",
244        "medium",
245        "high"
246      ],
247      "default_reasoning_effort": "medium",
248      "supports_attachments": false
249    },
250    {
251      "id": "zai-org-glm-5-1",
252      "name": "GLM 5.1",
253      "cost_per_1m_in": 1.75,
254      "cost_per_1m_out": 5.5,
255      "cost_per_1m_in_cached": 0,
256      "cost_per_1m_out_cached": 0,
257      "context_window": 200000,
258      "default_max_tokens": 24000,
259      "can_reason": true,
260      "supports_attachments": false
261    },
262    {
263      "id": "z-ai-glm-5v-turbo",
264      "name": "GLM 5V Turbo",
265      "cost_per_1m_in": 1.5,
266      "cost_per_1m_out": 5,
267      "cost_per_1m_in_cached": 0,
268      "cost_per_1m_out_cached": 0,
269      "context_window": 200000,
270      "default_max_tokens": 32768,
271      "can_reason": true,
272      "reasoning_levels": [
273        "low",
274        "medium",
275        "high"
276      ],
277      "default_reasoning_effort": "medium",
278      "supports_attachments": true
279    },
280    {
281      "id": "openai-gpt-4o-2024-11-20",
282      "name": "GPT-4o",
283      "cost_per_1m_in": 3.125,
284      "cost_per_1m_out": 12.5,
285      "cost_per_1m_in_cached": 0,
286      "cost_per_1m_out_cached": 0,
287      "context_window": 128000,
288      "default_max_tokens": 16384,
289      "can_reason": false,
290      "supports_attachments": true
291    },
292    {
293      "id": "openai-gpt-4o-mini-2024-07-18",
294      "name": "GPT-4o Mini",
295      "cost_per_1m_in": 0.1875,
296      "cost_per_1m_out": 0.75,
297      "cost_per_1m_in_cached": 0,
298      "cost_per_1m_out_cached": 0,
299      "context_window": 128000,
300      "default_max_tokens": 16384,
301      "can_reason": false,
302      "supports_attachments": true
303    },
304    {
305      "id": "openai-gpt-52",
306      "name": "GPT-5.2",
307      "cost_per_1m_in": 2.19,
308      "cost_per_1m_out": 17.5,
309      "cost_per_1m_in_cached": 0,
310      "cost_per_1m_out_cached": 0,
311      "context_window": 256000,
312      "default_max_tokens": 65536,
313      "can_reason": true,
314      "reasoning_levels": [
315        "low",
316        "medium",
317        "high"
318      ],
319      "default_reasoning_effort": "medium",
320      "supports_attachments": false
321    },
322    {
323      "id": "openai-gpt-52-codex",
324      "name": "GPT-5.2 Codex",
325      "cost_per_1m_in": 2.19,
326      "cost_per_1m_out": 17.5,
327      "cost_per_1m_in_cached": 0,
328      "cost_per_1m_out_cached": 0,
329      "context_window": 256000,
330      "default_max_tokens": 65536,
331      "can_reason": true,
332      "reasoning_levels": [
333        "low",
334        "medium",
335        "high"
336      ],
337      "default_reasoning_effort": "medium",
338      "supports_attachments": true
339    },
340    {
341      "id": "openai-gpt-53-codex",
342      "name": "GPT-5.3 Codex",
343      "cost_per_1m_in": 2.19,
344      "cost_per_1m_out": 17.5,
345      "cost_per_1m_in_cached": 0,
346      "cost_per_1m_out_cached": 0,
347      "context_window": 400000,
348      "default_max_tokens": 128000,
349      "can_reason": true,
350      "reasoning_levels": [
351        "low",
352        "medium",
353        "high"
354      ],
355      "default_reasoning_effort": "medium",
356      "supports_attachments": true
357    },
358    {
359      "id": "openai-gpt-54",
360      "name": "GPT-5.4",
361      "cost_per_1m_in": 3.13,
362      "cost_per_1m_out": 18.8,
363      "cost_per_1m_in_cached": 0,
364      "cost_per_1m_out_cached": 0,
365      "context_window": 1000000,
366      "default_max_tokens": 131072,
367      "can_reason": true,
368      "reasoning_levels": [
369        "low",
370        "medium",
371        "high"
372      ],
373      "default_reasoning_effort": "medium",
374      "supports_attachments": true
375    },
376    {
377      "id": "openai-gpt-54-mini",
378      "name": "GPT-5.4 Mini",
379      "cost_per_1m_in": 0.9375,
380      "cost_per_1m_out": 5.625,
381      "cost_per_1m_in_cached": 0,
382      "cost_per_1m_out_cached": 0,
383      "context_window": 400000,
384      "default_max_tokens": 128000,
385      "can_reason": true,
386      "reasoning_levels": [
387        "low",
388        "medium",
389        "high"
390      ],
391      "default_reasoning_effort": "medium",
392      "supports_attachments": true
393    },
394    {
395      "id": "openai-gpt-54-pro",
396      "name": "GPT-5.4 Pro",
397      "cost_per_1m_in": 37.5,
398      "cost_per_1m_out": 225,
399      "cost_per_1m_in_cached": 0,
400      "cost_per_1m_out_cached": 0,
401      "context_window": 1000000,
402      "default_max_tokens": 128000,
403      "can_reason": true,
404      "reasoning_levels": [
405        "low",
406        "medium",
407        "high"
408      ],
409      "default_reasoning_effort": "medium",
410      "supports_attachments": true
411    },
412    {
413      "id": "openai-gpt-55",
414      "name": "GPT-5.5",
415      "cost_per_1m_in": 6.25,
416      "cost_per_1m_out": 37.5,
417      "cost_per_1m_in_cached": 0,
418      "cost_per_1m_out_cached": 0,
419      "context_window": 1000000,
420      "default_max_tokens": 131072,
421      "can_reason": true,
422      "reasoning_levels": [
423        "low",
424        "medium",
425        "high"
426      ],
427      "default_reasoning_effort": "medium",
428      "supports_attachments": true
429    },
430    {
431      "id": "openai-gpt-55-pro",
432      "name": "GPT-5.5 Pro",
433      "cost_per_1m_in": 37.5,
434      "cost_per_1m_out": 225,
435      "cost_per_1m_in_cached": 0,
436      "cost_per_1m_out_cached": 0,
437      "context_window": 1000000,
438      "default_max_tokens": 128000,
439      "can_reason": true,
440      "reasoning_levels": [
441        "low",
442        "medium",
443        "high"
444      ],
445      "default_reasoning_effort": "medium",
446      "supports_attachments": true
447    },
448    {
449      "id": "gemini-3-flash-preview",
450      "name": "Gemini 3 Flash Preview",
451      "cost_per_1m_in": 0.7,
452      "cost_per_1m_out": 3.75,
453      "cost_per_1m_in_cached": 0,
454      "cost_per_1m_out_cached": 0,
455      "context_window": 256000,
456      "default_max_tokens": 65536,
457      "can_reason": true,
458      "reasoning_levels": [
459        "low",
460        "medium",
461        "high"
462      ],
463      "default_reasoning_effort": "medium",
464      "supports_attachments": true
465    },
466    {
467      "id": "gemini-3-1-pro-preview",
468      "name": "Gemini 3.1 Pro Preview",
469      "cost_per_1m_in": 2.5,
470      "cost_per_1m_out": 15,
471      "cost_per_1m_in_cached": 0,
472      "cost_per_1m_out_cached": 0,
473      "context_window": 1000000,
474      "default_max_tokens": 32768,
475      "can_reason": true,
476      "reasoning_levels": [
477        "low",
478        "medium",
479        "high"
480      ],
481      "default_reasoning_effort": "medium",
482      "supports_attachments": true
483    },
484    {
485      "id": "gemma-4-uncensored",
486      "name": "Gemma 4 Uncensored",
487      "cost_per_1m_in": 0.1625,
488      "cost_per_1m_out": 0.5,
489      "cost_per_1m_in_cached": 0,
490      "cost_per_1m_out_cached": 0,
491      "context_window": 256000,
492      "default_max_tokens": 8192,
493      "can_reason": false,
494      "supports_attachments": true
495    },
496    {
497      "id": "google-gemma-3-27b-it",
498      "name": "Google Gemma 3 27B Instruct",
499      "cost_per_1m_in": 0.12,
500      "cost_per_1m_out": 0.2,
501      "cost_per_1m_in_cached": 0,
502      "cost_per_1m_out_cached": 0,
503      "context_window": 198000,
504      "default_max_tokens": 16384,
505      "can_reason": false,
506      "supports_attachments": true
507    },
508    {
509      "id": "google-gemma-4-26b-a4b-it",
510      "name": "Google Gemma 4 26B A4B Instruct",
511      "cost_per_1m_in": 0.1625,
512      "cost_per_1m_out": 0.5,
513      "cost_per_1m_in_cached": 0,
514      "cost_per_1m_out_cached": 0,
515      "context_window": 256000,
516      "default_max_tokens": 8192,
517      "can_reason": true,
518      "supports_attachments": true
519    },
520    {
521      "id": "google-gemma-4-31b-it",
522      "name": "Google Gemma 4 31B Instruct",
523      "cost_per_1m_in": 0.175,
524      "cost_per_1m_out": 0.5,
525      "cost_per_1m_in_cached": 0,
526      "cost_per_1m_out_cached": 0,
527      "context_window": 256000,
528      "default_max_tokens": 8192,
529      "can_reason": true,
530      "supports_attachments": true
531    },
532    {
533      "id": "grok-41-fast",
534      "name": "Grok 4.1 Fast",
535      "cost_per_1m_in": 0.23,
536      "cost_per_1m_out": 0.57,
537      "cost_per_1m_in_cached": 0,
538      "cost_per_1m_out_cached": 0,
539      "context_window": 1000000,
540      "default_max_tokens": 30000,
541      "can_reason": true,
542      "supports_attachments": true
543    },
544    {
545      "id": "grok-4-20",
546      "name": "Grok 4.20",
547      "cost_per_1m_in": 2.27,
548      "cost_per_1m_out": 6.8,
549      "cost_per_1m_in_cached": 0,
550      "cost_per_1m_out_cached": 0,
551      "context_window": 2000000,
552      "default_max_tokens": 128000,
553      "can_reason": true,
554      "supports_attachments": true
555    },
556    {
557      "id": "kimi-k2-thinking",
558      "name": "Kimi K2 Thinking",
559      "cost_per_1m_in": 0.75,
560      "cost_per_1m_out": 3.2,
561      "cost_per_1m_in_cached": 0,
562      "cost_per_1m_out_cached": 0,
563      "context_window": 256000,
564      "default_max_tokens": 65536,
565      "can_reason": true,
566      "reasoning_levels": [
567        "low",
568        "medium",
569        "high"
570      ],
571      "default_reasoning_effort": "medium",
572      "supports_attachments": false
573    },
574    {
575      "id": "kimi-k2-5",
576      "name": "Kimi K2.5",
577      "cost_per_1m_in": 0.56,
578      "cost_per_1m_out": 3.5,
579      "cost_per_1m_in_cached": 0,
580      "cost_per_1m_out_cached": 0,
581      "context_window": 256000,
582      "default_max_tokens": 65536,
583      "can_reason": true,
584      "reasoning_levels": [
585        "low",
586        "medium",
587        "high"
588      ],
589      "default_reasoning_effort": "medium",
590      "supports_attachments": true
591    },
592    {
593      "id": "kimi-k2-6",
594      "name": "Kimi K2.6",
595      "cost_per_1m_in": 0.7448,
596      "cost_per_1m_out": 4.655,
597      "cost_per_1m_in_cached": 0,
598      "cost_per_1m_out_cached": 0,
599      "context_window": 256000,
600      "default_max_tokens": 65536,
601      "can_reason": true,
602      "reasoning_levels": [
603        "low",
604        "medium",
605        "high"
606      ],
607      "default_reasoning_effort": "medium",
608      "supports_attachments": true
609    },
610    {
611      "id": "llama-3.2-3b",
612      "name": "Llama 3.2 3B",
613      "cost_per_1m_in": 0.15,
614      "cost_per_1m_out": 0.6,
615      "cost_per_1m_in_cached": 0,
616      "cost_per_1m_out_cached": 0,
617      "context_window": 128000,
618      "default_max_tokens": 4096,
619      "can_reason": false,
620      "supports_attachments": false
621    },
622    {
623      "id": "llama-3.3-70b",
624      "name": "Llama 3.3 70B",
625      "cost_per_1m_in": 0.7,
626      "cost_per_1m_out": 2.8,
627      "cost_per_1m_in_cached": 0,
628      "cost_per_1m_out_cached": 0,
629      "context_window": 128000,
630      "default_max_tokens": 4096,
631      "can_reason": false,
632      "supports_attachments": false
633    },
634    {
635      "id": "mercury-2",
636      "name": "Mercury 2",
637      "cost_per_1m_in": 0.3125,
638      "cost_per_1m_out": 0.9375,
639      "cost_per_1m_in_cached": 0,
640      "cost_per_1m_out_cached": 0,
641      "context_window": 128000,
642      "default_max_tokens": 50000,
643      "can_reason": true,
644      "reasoning_levels": [
645        "low",
646        "medium",
647        "high"
648      ],
649      "default_reasoning_effort": "medium",
650      "supports_attachments": false
651    },
652    {
653      "id": "minimax-m25",
654      "name": "MiniMax M2.5",
655      "cost_per_1m_in": 0.34,
656      "cost_per_1m_out": 1.19,
657      "cost_per_1m_in_cached": 0,
658      "cost_per_1m_out_cached": 0,
659      "context_window": 198000,
660      "default_max_tokens": 32768,
661      "can_reason": true,
662      "reasoning_levels": [
663        "low",
664        "medium",
665        "high"
666      ],
667      "default_reasoning_effort": "medium",
668      "supports_attachments": false
669    },
670    {
671      "id": "minimax-m27",
672      "name": "MiniMax M2.7",
673      "cost_per_1m_in": 0.375,
674      "cost_per_1m_out": 1.5,
675      "cost_per_1m_in_cached": 0,
676      "cost_per_1m_out_cached": 0,
677      "context_window": 198000,
678      "default_max_tokens": 32768,
679      "can_reason": true,
680      "reasoning_levels": [
681        "low",
682        "medium",
683        "high"
684      ],
685      "default_reasoning_effort": "medium",
686      "supports_attachments": false
687    },
688    {
689      "id": "mistral-small-3-2-24b-instruct",
690      "name": "Mistral Small 3.2 24B Instruct",
691      "cost_per_1m_in": 0.09375,
692      "cost_per_1m_out": 0.25,
693      "cost_per_1m_in_cached": 0,
694      "cost_per_1m_out_cached": 0,
695      "context_window": 256000,
696      "default_max_tokens": 16384,
697      "can_reason": false,
698      "supports_attachments": false
699    },
700    {
701      "id": "mistral-small-2603",
702      "name": "Mistral Small 4",
703      "cost_per_1m_in": 0.1875,
704      "cost_per_1m_out": 0.75,
705      "cost_per_1m_in_cached": 0,
706      "cost_per_1m_out_cached": 0,
707      "context_window": 256000,
708      "default_max_tokens": 65536,
709      "can_reason": true,
710      "reasoning_levels": [
711        "low",
712        "medium",
713        "high"
714      ],
715      "default_reasoning_effort": "medium",
716      "supports_attachments": true
717    },
718    {
719      "id": "nvidia-nemotron-3-nano-30b-a3b",
720      "name": "NVIDIA Nemotron 3 Nano 30B",
721      "cost_per_1m_in": 0.075,
722      "cost_per_1m_out": 0.3,
723      "cost_per_1m_in_cached": 0,
724      "cost_per_1m_out_cached": 0,
725      "context_window": 128000,
726      "default_max_tokens": 16384,
727      "can_reason": false,
728      "supports_attachments": false
729    },
730    {
731      "id": "nvidia-nemotron-cascade-2-30b-a3b",
732      "name": "Nemotron Cascade 2 30B A3B",
733      "cost_per_1m_in": 0.14,
734      "cost_per_1m_out": 0.8,
735      "cost_per_1m_in_cached": 0,
736      "cost_per_1m_out_cached": 0,
737      "context_window": 256000,
738      "default_max_tokens": 32768,
739      "can_reason": true,
740      "supports_attachments": false
741    },
742    {
743      "id": "openai-gpt-oss-120b",
744      "name": "OpenAI GPT OSS 120B",
745      "cost_per_1m_in": 0.07,
746      "cost_per_1m_out": 0.3,
747      "cost_per_1m_in_cached": 0,
748      "cost_per_1m_out_cached": 0,
749      "context_window": 128000,
750      "default_max_tokens": 16384,
751      "can_reason": false,
752      "supports_attachments": false
753    },
754    {
755      "id": "qwen3-235b-a22b-instruct-2507",
756      "name": "Qwen 3 235B A22B Instruct 2507",
757      "cost_per_1m_in": 0.15,
758      "cost_per_1m_out": 0.75,
759      "cost_per_1m_in_cached": 0,
760      "cost_per_1m_out_cached": 0,
761      "context_window": 128000,
762      "default_max_tokens": 16384,
763      "can_reason": false,
764      "supports_attachments": false
765    },
766    {
767      "id": "qwen3-235b-a22b-thinking-2507",
768      "name": "Qwen 3 235B A22B Thinking 2507",
769      "cost_per_1m_in": 0.45,
770      "cost_per_1m_out": 3.5,
771      "cost_per_1m_in_cached": 0,
772      "cost_per_1m_out_cached": 0,
773      "context_window": 128000,
774      "default_max_tokens": 16384,
775      "can_reason": true,
776      "supports_attachments": false
777    },
778    {
779      "id": "qwen3-coder-480b-a35b-instruct-turbo",
780      "name": "Qwen 3 Coder 480B Turbo",
781      "cost_per_1m_in": 0.35,
782      "cost_per_1m_out": 1.5,
783      "cost_per_1m_in_cached": 0,
784      "cost_per_1m_out_cached": 0,
785      "context_window": 256000,
786      "default_max_tokens": 65536,
787      "can_reason": false,
788      "supports_attachments": false
789    },
790    {
791      "id": "qwen3-coder-480b-a35b-instruct",
792      "name": "Qwen 3 Coder 480B",
793      "cost_per_1m_in": 0.75,
794      "cost_per_1m_out": 3,
795      "cost_per_1m_in_cached": 0,
796      "cost_per_1m_out_cached": 0,
797      "context_window": 256000,
798      "default_max_tokens": 65536,
799      "can_reason": false,
800      "supports_attachments": false
801    },
802    {
803      "id": "qwen3-next-80b",
804      "name": "Qwen 3 Next 80B",
805      "cost_per_1m_in": 0.35,
806      "cost_per_1m_out": 1.9,
807      "cost_per_1m_in_cached": 0,
808      "cost_per_1m_out_cached": 0,
809      "context_window": 256000,
810      "default_max_tokens": 16384,
811      "can_reason": false,
812      "supports_attachments": false
813    },
814    {
815      "id": "qwen3-5-35b-a3b",
816      "name": "Qwen 3.5 35B A3B",
817      "cost_per_1m_in": 0.3125,
818      "cost_per_1m_out": 1.25,
819      "cost_per_1m_in_cached": 0,
820      "cost_per_1m_out_cached": 0,
821      "context_window": 256000,
822      "default_max_tokens": 65536,
823      "can_reason": true,
824      "supports_attachments": true,
825      "options": {
826        "temperature": 1,
827        "top_p": 0.95
828      }
829    },
830    {
831      "id": "qwen3-5-397b-a17b",
832      "name": "Qwen 3.5 397B",
833      "cost_per_1m_in": 0.75,
834      "cost_per_1m_out": 4.5,
835      "cost_per_1m_in_cached": 0,
836      "cost_per_1m_out_cached": 0,
837      "context_window": 128000,
838      "default_max_tokens": 32768,
839      "can_reason": true,
840      "reasoning_levels": [
841        "low",
842        "medium",
843        "high"
844      ],
845      "default_reasoning_effort": "medium",
846      "supports_attachments": true
847    },
848    {
849      "id": "qwen3-5-9b",
850      "name": "Qwen 3.5 9B",
851      "cost_per_1m_in": 0.1,
852      "cost_per_1m_out": 0.15,
853      "cost_per_1m_in_cached": 0,
854      "cost_per_1m_out_cached": 0,
855      "context_window": 256000,
856      "default_max_tokens": 32768,
857      "can_reason": true,
858      "supports_attachments": true
859    },
860    {
861      "id": "qwen-3-6-plus",
862      "name": "Qwen 3.6 Plus Uncensored",
863      "cost_per_1m_in": 0.625,
864      "cost_per_1m_out": 3.75,
865      "cost_per_1m_in_cached": 0,
866      "cost_per_1m_out_cached": 0,
867      "context_window": 1000000,
868      "default_max_tokens": 65536,
869      "can_reason": true,
870      "supports_attachments": true,
871      "options": {
872        "temperature": 0.7,
873        "top_p": 0.8
874      }
875    },
876    {
877      "id": "e2ee-qwen3-30b-a3b-p",
878      "name": "Qwen3 30B A3B",
879      "cost_per_1m_in": 0.19,
880      "cost_per_1m_out": 0.69,
881      "cost_per_1m_in_cached": 0,
882      "cost_per_1m_out_cached": 0,
883      "context_window": 256000,
884      "default_max_tokens": 32768,
885      "can_reason": false,
886      "supports_attachments": false
887    },
888    {
889      "id": "qwen3-vl-235b-a22b",
890      "name": "Qwen3 VL 235B",
891      "cost_per_1m_in": 0.25,
892      "cost_per_1m_out": 1.5,
893      "cost_per_1m_in_cached": 0,
894      "cost_per_1m_out_cached": 0,
895      "context_window": 256000,
896      "default_max_tokens": 16384,
897      "can_reason": false,
898      "supports_attachments": true
899    },
900    {
901      "id": "e2ee-qwen3-vl-30b-a3b-p",
902      "name": "Qwen3 VL 30B A3B",
903      "cost_per_1m_in": 0.25,
904      "cost_per_1m_out": 0.9,
905      "cost_per_1m_in_cached": 0,
906      "cost_per_1m_out_cached": 0,
907      "context_window": 128000,
908      "default_max_tokens": 4096,
909      "can_reason": false,
910      "supports_attachments": true
911    },
912    {
913      "id": "e2ee-qwen3-5-122b-a10b",
914      "name": "Qwen3.5 122B A10B",
915      "cost_per_1m_in": 0.5,
916      "cost_per_1m_out": 4,
917      "cost_per_1m_in_cached": 0,
918      "cost_per_1m_out_cached": 0,
919      "context_window": 128000,
920      "default_max_tokens": 32768,
921      "can_reason": true,
922      "supports_attachments": true
923    },
924    {
925      "id": "qwen3-6-27b",
926      "name": "Qwen3.6 27B",
927      "cost_per_1m_in": 0.325,
928      "cost_per_1m_out": 3.25,
929      "cost_per_1m_in_cached": 0,
930      "cost_per_1m_out_cached": 0,
931      "context_window": 256000,
932      "default_max_tokens": 65536,
933      "can_reason": true,
934      "supports_attachments": true,
935      "options": {
936        "temperature": 1,
937        "top_p": 0.95
938      }
939    },
940    {
941      "id": "arcee-trinity-large-thinking",
942      "name": "Trinity Large Thinking",
943      "cost_per_1m_in": 0.3125,
944      "cost_per_1m_out": 1.125,
945      "cost_per_1m_in_cached": 0,
946      "cost_per_1m_out_cached": 0,
947      "context_window": 256000,
948      "default_max_tokens": 65536,
949      "can_reason": true,
950      "reasoning_levels": [
951        "low",
952        "medium",
953        "high"
954      ],
955      "default_reasoning_effort": "medium",
956      "supports_attachments": false
957    },
958    {
959      "id": "venice-uncensored-role-play",
960      "name": "Venice Role Play Uncensored",
961      "cost_per_1m_in": 0.5,
962      "cost_per_1m_out": 2,
963      "cost_per_1m_in_cached": 0,
964      "cost_per_1m_out_cached": 0,
965      "context_window": 128000,
966      "default_max_tokens": 4096,
967      "can_reason": false,
968      "supports_attachments": true
969    },
970    {
971      "id": "venice-uncensored-1-2",
972      "name": "Venice Uncensored 1.2",
973      "cost_per_1m_in": 0.2,
974      "cost_per_1m_out": 0.9,
975      "cost_per_1m_in_cached": 0,
976      "cost_per_1m_out_cached": 0,
977      "context_window": 128000,
978      "default_max_tokens": 8192,
979      "can_reason": false,
980      "supports_attachments": true
981    }
982  ]
983}