  1{
  2  "name": "Venice AI",
  3  "id": "venice",
  4  "api_key": "$VENICE_API_KEY",
  5  "api_endpoint": "https://api.venice.ai/api/v1",
  6  "type": "openai-compat",
  7  "default_large_model_id": "claude-opus-4-6-fast",
  8  "default_small_model_id": "deepseek-v4-flash",
  9  "models": [
 10    {
 11      "id": "claude-opus-4-5",
 12      "name": "Claude Opus 4.5",
 13      "cost_per_1m_in": 6,
 14      "cost_per_1m_out": 30,
 15      "cost_per_1m_in_cached": 0,
 16      "cost_per_1m_out_cached": 0,
 17      "context_window": 198000,
 18      "default_max_tokens": 32768,
 19      "can_reason": true,
 20      "reasoning_levels": [
 21        "low",
 22        "medium",
 23        "high"
 24      ],
 25      "default_reasoning_effort": "medium",
 26      "supports_attachments": true
 27    },
 28    {
 29      "id": "claude-opus-4-6",
 30      "name": "Claude Opus 4.6",
 31      "cost_per_1m_in": 6,
 32      "cost_per_1m_out": 30,
 33      "cost_per_1m_in_cached": 0,
 34      "cost_per_1m_out_cached": 0,
 35      "context_window": 1000000,
 36      "default_max_tokens": 128000,
 37      "can_reason": true,
 38      "reasoning_levels": [
 39        "low",
 40        "medium",
 41        "high"
 42      ],
 43      "default_reasoning_effort": "medium",
 44      "supports_attachments": true
 45    },
 46    {
 47      "id": "claude-opus-4-6-fast",
 48      "name": "Claude Opus 4.6 Fast",
 49      "cost_per_1m_in": 36,
 50      "cost_per_1m_out": 180,
 51      "cost_per_1m_in_cached": 0,
 52      "cost_per_1m_out_cached": 0,
 53      "context_window": 1000000,
 54      "default_max_tokens": 128000,
 55      "can_reason": true,
 56      "reasoning_levels": [
 57        "low",
 58        "medium",
 59        "high"
 60      ],
 61      "default_reasoning_effort": "medium",
 62      "supports_attachments": true
 63    },
 64    {
 65      "id": "claude-opus-4-7",
 66      "name": "Claude Opus 4.7",
 67      "cost_per_1m_in": 6,
 68      "cost_per_1m_out": 30,
 69      "cost_per_1m_in_cached": 0,
 70      "cost_per_1m_out_cached": 0,
 71      "context_window": 1000000,
 72      "default_max_tokens": 128000,
 73      "can_reason": true,
 74      "reasoning_levels": [
 75        "low",
 76        "medium",
 77        "high"
 78      ],
 79      "default_reasoning_effort": "medium",
 80      "supports_attachments": true
 81    },
 82    {
 83      "id": "claude-sonnet-4-5",
 84      "name": "Claude Sonnet 4.5",
 85      "cost_per_1m_in": 3.75,
 86      "cost_per_1m_out": 18.75,
 87      "cost_per_1m_in_cached": 0,
 88      "cost_per_1m_out_cached": 0,
 89      "context_window": 198000,
 90      "default_max_tokens": 64000,
 91      "can_reason": true,
 92      "reasoning_levels": [
 93        "low",
 94        "medium",
 95        "high"
 96      ],
 97      "default_reasoning_effort": "medium",
 98      "supports_attachments": true
 99    },
100    {
101      "id": "claude-sonnet-4-6",
102      "name": "Claude Sonnet 4.6",
103      "cost_per_1m_in": 3.6,
104      "cost_per_1m_out": 18,
105      "cost_per_1m_in_cached": 0,
106      "cost_per_1m_out_cached": 0,
107      "context_window": 1000000,
108      "default_max_tokens": 64000,
109      "can_reason": true,
110      "reasoning_levels": [
111        "low",
112        "medium",
113        "high"
114      ],
115      "default_reasoning_effort": "medium",
116      "supports_attachments": true
117    },
118    {
119      "id": "deepseek-v3.2",
120      "name": "DeepSeek V3.2",
121      "cost_per_1m_in": 0.33,
122      "cost_per_1m_out": 0.48,
123      "cost_per_1m_in_cached": 0,
124      "cost_per_1m_out_cached": 0,
125      "context_window": 160000,
126      "default_max_tokens": 32768,
127      "can_reason": true,
128      "supports_attachments": false
129    },
130    {
131      "id": "deepseek-v4-flash",
132      "name": "DeepSeek V4 Flash",
133      "cost_per_1m_in": 0.17,
134      "cost_per_1m_out": 0.35,
135      "cost_per_1m_in_cached": 0,
136      "cost_per_1m_out_cached": 0,
137      "context_window": 1000000,
138      "default_max_tokens": 32768,
139      "can_reason": true,
140      "reasoning_levels": [
141        "low",
142        "medium",
143        "high"
144      ],
145      "default_reasoning_effort": "medium",
146      "supports_attachments": false
147    },
148    {
149      "id": "deepseek-v4-pro",
150      "name": "DeepSeek V4 Pro",
151      "cost_per_1m_in": 1.73,
152      "cost_per_1m_out": 3.796,
153      "cost_per_1m_in_cached": 0,
154      "cost_per_1m_out_cached": 0,
155      "context_window": 1000000,
156      "default_max_tokens": 32768,
157      "can_reason": true,
158      "reasoning_levels": [
159        "low",
160        "medium",
161        "high"
162      ],
163      "default_reasoning_effort": "medium",
164      "supports_attachments": false
165    },
166    {
167      "id": "zai-org-glm-4.6",
168      "name": "GLM 4.6",
169      "cost_per_1m_in": 0.85,
170      "cost_per_1m_out": 2.75,
171      "cost_per_1m_in_cached": 0,
172      "cost_per_1m_out_cached": 0,
173      "context_window": 198000,
174      "default_max_tokens": 16384,
175      "can_reason": true,
176      "supports_attachments": false
177    },
178    {
179      "id": "zai-org-glm-4.7",
180      "name": "GLM 4.7",
181      "cost_per_1m_in": 0.55,
182      "cost_per_1m_out": 2.65,
183      "cost_per_1m_in_cached": 0,
184      "cost_per_1m_out_cached": 0,
185      "context_window": 198000,
186      "default_max_tokens": 16384,
187      "can_reason": true,
188      "supports_attachments": false
189    },
190    {
191      "id": "zai-org-glm-4.7-flash",
192      "name": "GLM 4.7 Flash",
193      "cost_per_1m_in": 0.125,
194      "cost_per_1m_out": 0.5,
195      "cost_per_1m_in_cached": 0,
196      "cost_per_1m_out_cached": 0,
197      "context_window": 128000,
198      "default_max_tokens": 16384,
199      "can_reason": true,
200      "reasoning_levels": [
201        "low",
202        "medium",
203        "high"
204      ],
205      "default_reasoning_effort": "medium",
206      "supports_attachments": false
207    },
208    {
209      "id": "olafangensan-glm-4.7-flash-heretic",
210      "name": "GLM 4.7 Flash Heretic",
211      "cost_per_1m_in": 0.14,
212      "cost_per_1m_out": 0.8,
213      "cost_per_1m_in_cached": 0,
214      "cost_per_1m_out_cached": 0,
215      "context_window": 200000,
216      "default_max_tokens": 24000,
217      "can_reason": true,
218      "supports_attachments": false
219    },
220    {
221      "id": "zai-org-glm-5",
222      "name": "GLM 5",
223      "cost_per_1m_in": 1,
224      "cost_per_1m_out": 3.2,
225      "cost_per_1m_in_cached": 0,
226      "cost_per_1m_out_cached": 0,
227      "context_window": 198000,
228      "default_max_tokens": 32000,
229      "can_reason": true,
230      "supports_attachments": false
231    },
232    {
233      "id": "z-ai-glm-5-turbo",
234      "name": "GLM 5 Turbo",
235      "cost_per_1m_in": 1.2,
236      "cost_per_1m_out": 4,
237      "cost_per_1m_in_cached": 0,
238      "cost_per_1m_out_cached": 0,
239      "context_window": 200000,
240      "default_max_tokens": 32768,
241      "can_reason": true,
242      "reasoning_levels": [
243        "low",
244        "medium",
245        "high"
246      ],
247      "default_reasoning_effort": "medium",
248      "supports_attachments": false
249    },
250    {
251      "id": "zai-org-glm-5-1",
252      "name": "GLM 5.1",
253      "cost_per_1m_in": 1.75,
254      "cost_per_1m_out": 5.5,
255      "cost_per_1m_in_cached": 0,
256      "cost_per_1m_out_cached": 0,
257      "context_window": 200000,
258      "default_max_tokens": 24000,
259      "can_reason": true,
260      "supports_attachments": false
261    },
262    {
263      "id": "z-ai-glm-5v-turbo",
264      "name": "GLM 5V Turbo",
265      "cost_per_1m_in": 1.5,
266      "cost_per_1m_out": 5,
267      "cost_per_1m_in_cached": 0,
268      "cost_per_1m_out_cached": 0,
269      "context_window": 200000,
270      "default_max_tokens": 32768,
271      "can_reason": true,
272      "reasoning_levels": [
273        "low",
274        "medium",
275        "high"
276      ],
277      "default_reasoning_effort": "medium",
278      "supports_attachments": true
279    },
280    {
281      "id": "openai-gpt-4o-2024-11-20",
282      "name": "GPT-4o",
283      "cost_per_1m_in": 3.125,
284      "cost_per_1m_out": 12.5,
285      "cost_per_1m_in_cached": 0,
286      "cost_per_1m_out_cached": 0,
287      "context_window": 128000,
288      "default_max_tokens": 16384,
289      "can_reason": false,
290      "supports_attachments": true
291    },
292    {
293      "id": "openai-gpt-4o-mini-2024-07-18",
294      "name": "GPT-4o Mini",
295      "cost_per_1m_in": 0.1875,
296      "cost_per_1m_out": 0.75,
297      "cost_per_1m_in_cached": 0,
298      "cost_per_1m_out_cached": 0,
299      "context_window": 128000,
300      "default_max_tokens": 16384,
301      "can_reason": false,
302      "supports_attachments": true
303    },
304    {
305      "id": "openai-gpt-52",
306      "name": "GPT-5.2",
307      "cost_per_1m_in": 2.19,
308      "cost_per_1m_out": 17.5,
309      "cost_per_1m_in_cached": 0,
310      "cost_per_1m_out_cached": 0,
311      "context_window": 256000,
312      "default_max_tokens": 65536,
313      "can_reason": true,
314      "reasoning_levels": [
315        "low",
316        "medium",
317        "high"
318      ],
319      "default_reasoning_effort": "medium",
320      "supports_attachments": false
321    },
322    {
323      "id": "openai-gpt-52-codex",
324      "name": "GPT-5.2 Codex",
325      "cost_per_1m_in": 2.19,
326      "cost_per_1m_out": 17.5,
327      "cost_per_1m_in_cached": 0,
328      "cost_per_1m_out_cached": 0,
329      "context_window": 256000,
330      "default_max_tokens": 65536,
331      "can_reason": true,
332      "reasoning_levels": [
333        "low",
334        "medium",
335        "high"
336      ],
337      "default_reasoning_effort": "medium",
338      "supports_attachments": true
339    },
340    {
341      "id": "openai-gpt-53-codex",
342      "name": "GPT-5.3 Codex",
343      "cost_per_1m_in": 2.19,
344      "cost_per_1m_out": 17.5,
345      "cost_per_1m_in_cached": 0,
346      "cost_per_1m_out_cached": 0,
347      "context_window": 400000,
348      "default_max_tokens": 128000,
349      "can_reason": true,
350      "reasoning_levels": [
351        "low",
352        "medium",
353        "high"
354      ],
355      "default_reasoning_effort": "medium",
356      "supports_attachments": true
357    },
358    {
359      "id": "openai-gpt-54",
360      "name": "GPT-5.4",
361      "cost_per_1m_in": 3.13,
362      "cost_per_1m_out": 18.8,
363      "cost_per_1m_in_cached": 0,
364      "cost_per_1m_out_cached": 0,
365      "context_window": 1000000,
366      "default_max_tokens": 131072,
367      "can_reason": true,
368      "reasoning_levels": [
369        "low",
370        "medium",
371        "high"
372      ],
373      "default_reasoning_effort": "medium",
374      "supports_attachments": true
375    },
376    {
377      "id": "openai-gpt-54-mini",
378      "name": "GPT-5.4 Mini",
379      "cost_per_1m_in": 0.9375,
380      "cost_per_1m_out": 5.625,
381      "cost_per_1m_in_cached": 0,
382      "cost_per_1m_out_cached": 0,
383      "context_window": 400000,
384      "default_max_tokens": 128000,
385      "can_reason": true,
386      "reasoning_levels": [
387        "low",
388        "medium",
389        "high"
390      ],
391      "default_reasoning_effort": "medium",
392      "supports_attachments": true
393    },
394    {
395      "id": "openai-gpt-54-pro",
396      "name": "GPT-5.4 Pro",
397      "cost_per_1m_in": 37.5,
398      "cost_per_1m_out": 225,
399      "cost_per_1m_in_cached": 0,
400      "cost_per_1m_out_cached": 0,
401      "context_window": 1000000,
402      "default_max_tokens": 128000,
403      "can_reason": true,
404      "reasoning_levels": [
405        "low",
406        "medium",
407        "high"
408      ],
409      "default_reasoning_effort": "medium",
410      "supports_attachments": true
411    },
412    {
413      "id": "openai-gpt-55",
414      "name": "GPT-5.5",
415      "cost_per_1m_in": 6.25,
416      "cost_per_1m_out": 37.5,
417      "cost_per_1m_in_cached": 0,
418      "cost_per_1m_out_cached": 0,
419      "context_window": 1000000,
420      "default_max_tokens": 131072,
421      "can_reason": true,
422      "reasoning_levels": [
423        "low",
424        "medium",
425        "high"
426      ],
427      "default_reasoning_effort": "medium",
428      "supports_attachments": true
429    },
430    {
431      "id": "openai-gpt-55-pro",
432      "name": "GPT-5.5 Pro",
433      "cost_per_1m_in": 37.5,
434      "cost_per_1m_out": 225,
435      "cost_per_1m_in_cached": 0,
436      "cost_per_1m_out_cached": 0,
437      "context_window": 1000000,
438      "default_max_tokens": 128000,
439      "can_reason": true,
440      "reasoning_levels": [
441        "low",
442        "medium",
443        "high"
444      ],
445      "default_reasoning_effort": "medium",
446      "supports_attachments": true
447    },
448    {
449      "id": "gemini-3-flash-preview",
450      "name": "Gemini 3 Flash Preview",
451      "cost_per_1m_in": 0.7,
452      "cost_per_1m_out": 3.75,
453      "cost_per_1m_in_cached": 0,
454      "cost_per_1m_out_cached": 0,
455      "context_window": 256000,
456      "default_max_tokens": 65536,
457      "can_reason": true,
458      "reasoning_levels": [
459        "low",
460        "medium",
461        "high"
462      ],
463      "default_reasoning_effort": "medium",
464      "supports_attachments": true
465    },
466    {
467      "id": "gemini-3-1-pro-preview",
468      "name": "Gemini 3.1 Pro Preview",
469      "cost_per_1m_in": 2.5,
470      "cost_per_1m_out": 15,
471      "cost_per_1m_in_cached": 0,
472      "cost_per_1m_out_cached": 0,
473      "context_window": 1000000,
474      "default_max_tokens": 32768,
475      "can_reason": true,
476      "reasoning_levels": [
477        "low",
478        "medium",
479        "high"
480      ],
481      "default_reasoning_effort": "medium",
482      "supports_attachments": true
483    },
484    {
485      "id": "gemma-4-uncensored",
486      "name": "Gemma 4 Uncensored",
487      "cost_per_1m_in": 0.1625,
488      "cost_per_1m_out": 0.5,
489      "cost_per_1m_in_cached": 0,
490      "cost_per_1m_out_cached": 0,
491      "context_window": 256000,
492      "default_max_tokens": 8192,
493      "can_reason": false,
494      "supports_attachments": true
495    },
496    {
497      "id": "google-gemma-3-27b-it",
498      "name": "Google Gemma 3 27B Instruct",
499      "cost_per_1m_in": 0.12,
500      "cost_per_1m_out": 0.2,
501      "cost_per_1m_in_cached": 0,
502      "cost_per_1m_out_cached": 0,
503      "context_window": 198000,
504      "default_max_tokens": 16384,
505      "can_reason": false,
506      "supports_attachments": true
507    },
508    {
509      "id": "google-gemma-4-26b-a4b-it",
510      "name": "Google Gemma 4 26B A4B Instruct",
511      "cost_per_1m_in": 0.1625,
512      "cost_per_1m_out": 0.5,
513      "cost_per_1m_in_cached": 0,
514      "cost_per_1m_out_cached": 0,
515      "context_window": 256000,
516      "default_max_tokens": 8192,
517      "can_reason": true,
518      "supports_attachments": true
519    },
520    {
521      "id": "google-gemma-4-31b-it",
522      "name": "Google Gemma 4 31B Instruct",
523      "cost_per_1m_in": 0.175,
524      "cost_per_1m_out": 0.5,
525      "cost_per_1m_in_cached": 0,
526      "cost_per_1m_out_cached": 0,
527      "context_window": 256000,
528      "default_max_tokens": 8192,
529      "can_reason": true,
530      "supports_attachments": true
531    },
532    {
533      "id": "grok-41-fast",
534      "name": "Grok 4.1 Fast",
535      "cost_per_1m_in": 0.23,
536      "cost_per_1m_out": 0.57,
537      "cost_per_1m_in_cached": 0,
538      "cost_per_1m_out_cached": 0,
539      "context_window": 1000000,
540      "default_max_tokens": 30000,
541      "can_reason": true,
542      "supports_attachments": true
543    },
544    {
545      "id": "grok-4-20",
546      "name": "Grok 4.20",
547      "cost_per_1m_in": 1.42,
548      "cost_per_1m_out": 2.83,
549      "cost_per_1m_in_cached": 0,
550      "cost_per_1m_out_cached": 0,
551      "context_window": 2000000,
552      "default_max_tokens": 128000,
553      "can_reason": true,
554      "supports_attachments": true
555    },
556    {
557      "id": "grok-4-3",
558      "name": "Grok 4.3",
559      "cost_per_1m_in": 1.42,
560      "cost_per_1m_out": 2.83,
561      "cost_per_1m_in_cached": 0,
562      "cost_per_1m_out_cached": 0,
563      "context_window": 1000000,
564      "default_max_tokens": 32000,
565      "can_reason": true,
566      "supports_attachments": true
567    },
568    {
569      "id": "kimi-k2-5",
570      "name": "Kimi K2.5",
571      "cost_per_1m_in": 0.56,
572      "cost_per_1m_out": 3.5,
573      "cost_per_1m_in_cached": 0,
574      "cost_per_1m_out_cached": 0,
575      "context_window": 256000,
576      "default_max_tokens": 65536,
577      "can_reason": true,
578      "reasoning_levels": [
579        "low",
580        "medium",
581        "high"
582      ],
583      "default_reasoning_effort": "medium",
584      "supports_attachments": true
585    },
586    {
587      "id": "kimi-k2-6",
588      "name": "Kimi K2.6",
589      "cost_per_1m_in": 0.85,
590      "cost_per_1m_out": 4.655,
591      "cost_per_1m_in_cached": 0,
592      "cost_per_1m_out_cached": 0,
593      "context_window": 256000,
594      "default_max_tokens": 65536,
595      "can_reason": true,
596      "reasoning_levels": [
597        "low",
598        "medium",
599        "high"
600      ],
601      "default_reasoning_effort": "medium",
602      "supports_attachments": true
603    },
604    {
605      "id": "llama-3.2-3b",
606      "name": "Llama 3.2 3B",
607      "cost_per_1m_in": 0.15,
608      "cost_per_1m_out": 0.6,
609      "cost_per_1m_in_cached": 0,
610      "cost_per_1m_out_cached": 0,
611      "context_window": 128000,
612      "default_max_tokens": 4096,
613      "can_reason": false,
614      "supports_attachments": false
615    },
616    {
617      "id": "llama-3.3-70b",
618      "name": "Llama 3.3 70B",
619      "cost_per_1m_in": 0.7,
620      "cost_per_1m_out": 2.8,
621      "cost_per_1m_in_cached": 0,
622      "cost_per_1m_out_cached": 0,
623      "context_window": 128000,
624      "default_max_tokens": 4096,
625      "can_reason": false,
626      "supports_attachments": false
627    },
628    {
629      "id": "mercury-2",
630      "name": "Mercury 2",
631      "cost_per_1m_in": 0.3125,
632      "cost_per_1m_out": 0.9375,
633      "cost_per_1m_in_cached": 0,
634      "cost_per_1m_out_cached": 0,
635      "context_window": 128000,
636      "default_max_tokens": 50000,
637      "can_reason": true,
638      "reasoning_levels": [
639        "low",
640        "medium",
641        "high"
642      ],
643      "default_reasoning_effort": "medium",
644      "supports_attachments": false
645    },
646    {
647      "id": "minimax-m25",
648      "name": "MiniMax M2.5",
649      "cost_per_1m_in": 0.34,
650      "cost_per_1m_out": 1.19,
651      "cost_per_1m_in_cached": 0,
652      "cost_per_1m_out_cached": 0,
653      "context_window": 198000,
654      "default_max_tokens": 32768,
655      "can_reason": true,
656      "reasoning_levels": [
657        "low",
658        "medium",
659        "high"
660      ],
661      "default_reasoning_effort": "medium",
662      "supports_attachments": false
663    },
664    {
665      "id": "minimax-m27",
666      "name": "MiniMax M2.7",
667      "cost_per_1m_in": 0.375,
668      "cost_per_1m_out": 1.5,
669      "cost_per_1m_in_cached": 0,
670      "cost_per_1m_out_cached": 0,
671      "context_window": 198000,
672      "default_max_tokens": 32768,
673      "can_reason": true,
674      "reasoning_levels": [
675        "low",
676        "medium",
677        "high"
678      ],
679      "default_reasoning_effort": "medium",
680      "supports_attachments": false
681    },
682    {
683      "id": "mistral-small-3-2-24b-instruct",
684      "name": "Mistral Small 3.2 24B Instruct",
685      "cost_per_1m_in": 0.09375,
686      "cost_per_1m_out": 0.25,
687      "cost_per_1m_in_cached": 0,
688      "cost_per_1m_out_cached": 0,
689      "context_window": 256000,
690      "default_max_tokens": 16384,
691      "can_reason": false,
692      "supports_attachments": false
693    },
694    {
695      "id": "mistral-small-2603",
696      "name": "Mistral Small 4",
697      "cost_per_1m_in": 0.1875,
698      "cost_per_1m_out": 0.75,
699      "cost_per_1m_in_cached": 0,
700      "cost_per_1m_out_cached": 0,
701      "context_window": 256000,
702      "default_max_tokens": 65536,
703      "can_reason": true,
704      "reasoning_levels": [
705        "low",
706        "medium",
707        "high"
708      ],
709      "default_reasoning_effort": "medium",
710      "supports_attachments": true
711    },
712    {
713      "id": "nvidia-nemotron-3-nano-30b-a3b",
714      "name": "NVIDIA Nemotron 3 Nano 30B",
715      "cost_per_1m_in": 0.075,
716      "cost_per_1m_out": 0.3,
717      "cost_per_1m_in_cached": 0,
718      "cost_per_1m_out_cached": 0,
719      "context_window": 128000,
720      "default_max_tokens": 16384,
721      "can_reason": false,
722      "supports_attachments": false
723    },
724    {
725      "id": "nvidia-nemotron-cascade-2-30b-a3b",
726      "name": "Nemotron Cascade 2 30B A3B",
727      "cost_per_1m_in": 0.14,
728      "cost_per_1m_out": 0.8,
729      "cost_per_1m_in_cached": 0,
730      "cost_per_1m_out_cached": 0,
731      "context_window": 256000,
732      "default_max_tokens": 32768,
733      "can_reason": true,
734      "supports_attachments": false
735    },
736    {
737      "id": "openai-gpt-oss-120b",
738      "name": "OpenAI GPT OSS 120B",
739      "cost_per_1m_in": 0.07,
740      "cost_per_1m_out": 0.3,
741      "cost_per_1m_in_cached": 0,
742      "cost_per_1m_out_cached": 0,
743      "context_window": 128000,
744      "default_max_tokens": 16384,
745      "can_reason": true,
746      "supports_attachments": false
747    },
748    {
749      "id": "qwen3-235b-a22b-instruct-2507",
750      "name": "Qwen 3 235B A22B Instruct 2507",
751      "cost_per_1m_in": 0.15,
752      "cost_per_1m_out": 0.75,
753      "cost_per_1m_in_cached": 0,
754      "cost_per_1m_out_cached": 0,
755      "context_window": 128000,
756      "default_max_tokens": 16384,
757      "can_reason": false,
758      "supports_attachments": false
759    },
760    {
761      "id": "qwen3-235b-a22b-thinking-2507",
762      "name": "Qwen 3 235B A22B Thinking 2507",
763      "cost_per_1m_in": 0.45,
764      "cost_per_1m_out": 3.5,
765      "cost_per_1m_in_cached": 0,
766      "cost_per_1m_out_cached": 0,
767      "context_window": 128000,
768      "default_max_tokens": 16384,
769      "can_reason": true,
770      "supports_attachments": false
771    },
772    {
773      "id": "qwen3-coder-480b-a35b-instruct-turbo",
774      "name": "Qwen 3 Coder 480B Turbo",
775      "cost_per_1m_in": 0.35,
776      "cost_per_1m_out": 1.5,
777      "cost_per_1m_in_cached": 0,
778      "cost_per_1m_out_cached": 0,
779      "context_window": 256000,
780      "default_max_tokens": 65536,
781      "can_reason": false,
782      "supports_attachments": false
783    },
784    {
785      "id": "qwen3-next-80b",
      "name": "Qwen 3 Next 80B",
787      "cost_per_1m_in": 0.35,
788      "cost_per_1m_out": 1.9,
789      "cost_per_1m_in_cached": 0,
790      "cost_per_1m_out_cached": 0,
791      "context_window": 256000,
792      "default_max_tokens": 16384,
793      "can_reason": false,
794      "supports_attachments": false
795    },
796    {
797      "id": "qwen3-5-35b-a3b",
798      "name": "Qwen 3.5 35B A3B",
799      "cost_per_1m_in": 0.3125,
800      "cost_per_1m_out": 1.25,
801      "cost_per_1m_in_cached": 0,
802      "cost_per_1m_out_cached": 0,
803      "context_window": 256000,
804      "default_max_tokens": 65536,
805      "can_reason": true,
806      "supports_attachments": true,
807      "options": {
808        "temperature": 1,
809        "top_p": 0.95
810      }
811    },
812    {
813      "id": "qwen3-5-397b-a17b",
814      "name": "Qwen 3.5 397B",
815      "cost_per_1m_in": 0.75,
816      "cost_per_1m_out": 4.5,
817      "cost_per_1m_in_cached": 0,
818      "cost_per_1m_out_cached": 0,
819      "context_window": 128000,
820      "default_max_tokens": 32768,
821      "can_reason": true,
822      "reasoning_levels": [
823        "low",
824        "medium",
825        "high"
826      ],
827      "default_reasoning_effort": "medium",
828      "supports_attachments": true
829    },
830    {
831      "id": "qwen3-5-9b",
832      "name": "Qwen 3.5 9B",
833      "cost_per_1m_in": 0.1,
834      "cost_per_1m_out": 0.15,
835      "cost_per_1m_in_cached": 0,
836      "cost_per_1m_out_cached": 0,
837      "context_window": 256000,
838      "default_max_tokens": 32768,
839      "can_reason": true,
840      "supports_attachments": true
841    },
842    {
843      "id": "qwen3-6-27b",
844      "name": "Qwen 3.6 27B",
845      "cost_per_1m_in": 0.325,
846      "cost_per_1m_out": 3.25,
847      "cost_per_1m_in_cached": 0,
848      "cost_per_1m_out_cached": 0,
849      "context_window": 256000,
850      "default_max_tokens": 65536,
851      "can_reason": true,
852      "supports_attachments": true,
853      "options": {
854        "temperature": 1,
855        "top_p": 0.95
856      }
857    },
858    {
859      "id": "qwen-3-6-plus",
860      "name": "Qwen 3.6 Plus Uncensored",
861      "cost_per_1m_in": 0.625,
862      "cost_per_1m_out": 3.75,
863      "cost_per_1m_in_cached": 0,
864      "cost_per_1m_out_cached": 0,
865      "context_window": 1000000,
866      "default_max_tokens": 65536,
867      "can_reason": true,
868      "supports_attachments": true,
869      "options": {
870        "temperature": 0.7,
871        "top_p": 0.8
872      }
873    },
874    {
875      "id": "e2ee-qwen3-30b-a3b-p",
876      "name": "Qwen3 30B A3B",
877      "cost_per_1m_in": 0.19,
878      "cost_per_1m_out": 0.69,
879      "cost_per_1m_in_cached": 0,
880      "cost_per_1m_out_cached": 0,
881      "context_window": 256000,
882      "default_max_tokens": 32768,
883      "can_reason": false,
884      "supports_attachments": false
885    },
886    {
887      "id": "qwen3-vl-235b-a22b",
888      "name": "Qwen3 VL 235B",
889      "cost_per_1m_in": 0.25,
890      "cost_per_1m_out": 1.5,
891      "cost_per_1m_in_cached": 0,
892      "cost_per_1m_out_cached": 0,
893      "context_window": 256000,
894      "default_max_tokens": 16384,
895      "can_reason": false,
896      "supports_attachments": true
897    },
898    {
899      "id": "e2ee-qwen3-vl-30b-a3b-p",
900      "name": "Qwen3 VL 30B A3B",
901      "cost_per_1m_in": 0.25,
902      "cost_per_1m_out": 0.9,
903      "cost_per_1m_in_cached": 0,
904      "cost_per_1m_out_cached": 0,
905      "context_window": 128000,
906      "default_max_tokens": 4096,
907      "can_reason": false,
908      "supports_attachments": true
909    },
910    {
911      "id": "e2ee-qwen3-5-122b-a10b",
912      "name": "Qwen3.5 122B A10B",
913      "cost_per_1m_in": 0.5,
914      "cost_per_1m_out": 4,
915      "cost_per_1m_in_cached": 0,
916      "cost_per_1m_out_cached": 0,
917      "context_window": 128000,
918      "default_max_tokens": 32768,
919      "can_reason": true,
920      "supports_attachments": true
921    },
922    {
923      "id": "arcee-trinity-large-thinking",
924      "name": "Trinity Large Thinking",
925      "cost_per_1m_in": 0.3125,
926      "cost_per_1m_out": 1.125,
927      "cost_per_1m_in_cached": 0,
928      "cost_per_1m_out_cached": 0,
929      "context_window": 256000,
930      "default_max_tokens": 65536,
931      "can_reason": true,
932      "reasoning_levels": [
933        "low",
934        "medium",
935        "high"
936      ],
937      "default_reasoning_effort": "medium",
938      "supports_attachments": false
939    },
940    {
941      "id": "venice-uncensored-role-play",
942      "name": "Venice Role Play Uncensored",
943      "cost_per_1m_in": 0.5,
944      "cost_per_1m_out": 2,
945      "cost_per_1m_in_cached": 0,
946      "cost_per_1m_out_cached": 0,
947      "context_window": 128000,
948      "default_max_tokens": 4096,
949      "can_reason": false,
950      "supports_attachments": true
951    },
952    {
953      "id": "venice-uncensored-1-2",
954      "name": "Venice Uncensored 1.2",
955      "cost_per_1m_in": 0.2,
956      "cost_per_1m_out": 0.9,
957      "cost_per_1m_in_cached": 0,
958      "cost_per_1m_out_cached": 0,
959      "context_window": 128000,
960      "default_max_tokens": 8192,
961      "can_reason": false,
962      "supports_attachments": true
963    }
964  ]
965}