{
  "name": "Venice AI",
  "id": "venice",
  "api_key": "$VENICE_API_KEY",
  "api_endpoint": "https://api.venice.ai/api/v1",
  "type": "openai-compat",
  "default_large_model_id": "claude-opus-4-6",
  "default_small_model_id": "minimax-m25",
  "models": [
    {
      "id": "claude-opus-4-5",
      "name": "Claude Opus 4.5",
      "cost_per_1m_in": 6,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 198000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "claude-opus-4-6",
      "name": "Claude Opus 4.6",
      "cost_per_1m_in": 6,
      "cost_per_1m_out": 30,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "claude-sonnet-4-5",
      "name": "Claude Sonnet 4.5",
      "cost_per_1m_in": 3.75,
      "cost_per_1m_out": 18.75,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 198000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "claude-sonnet-4-6",
      "name": "Claude Sonnet 4.6",
      "cost_per_1m_in": 3.6,
      "cost_per_1m_out": 18,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "deepseek-v3.2",
      "name": "DeepSeek V3.2",
      "cost_per_1m_in": 0.33,
      "cost_per_1m_out": 0.48,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 160000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "zai-org-glm-4.6",
      "name": "GLM 4.6",
      "cost_per_1m_in": 0.85,
      "cost_per_1m_out": 2.75,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 198000,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "zai-org-glm-4.7",
      "name": "GLM 4.7",
      "cost_per_1m_in": 0.55,
      "cost_per_1m_out": 2.65,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 198000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "zai-org-glm-4.7-flash",
      "name": "GLM 4.7 Flash",
      "cost_per_1m_in": 0.125,
      "cost_per_1m_out": 0.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "olafangensan-glm-4.7-flash-heretic",
      "name": "GLM 4.7 Flash Heretic",
      "cost_per_1m_in": 0.14,
      "cost_per_1m_out": 0.8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 200000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "zai-org-glm-5",
      "name": "GLM 5",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 3.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 198000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai-gpt-4o-2024-11-20",
      "name": "GPT-4o",
      "cost_per_1m_in": 3.125,
      "cost_per_1m_out": 12.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai-gpt-4o-mini-2024-07-18",
      "name": "GPT-4o Mini",
      "cost_per_1m_in": 0.1875,
      "cost_per_1m_out": 0.75,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai-gpt-52",
      "name": "GPT-5.2",
      "cost_per_1m_in": 2.19,
      "cost_per_1m_out": 17.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai-gpt-52-codex",
      "name": "GPT-5.2 Codex",
      "cost_per_1m_in": 2.19,
      "cost_per_1m_out": 17.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai-gpt-53-codex",
      "name": "GPT-5.3 Codex",
      "cost_per_1m_in": 2.19,
      "cost_per_1m_out": 17.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 400000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai-gpt-54",
      "name": "GPT-5.4",
      "cost_per_1m_in": 3.13,
      "cost_per_1m_out": 18.8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "openai-gpt-54-pro",
      "name": "GPT-5.4 Pro",
      "cost_per_1m_in": 37.5,
      "cost_per_1m_out": 225,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "gemini-3-flash-preview",
      "name": "Gemini 3 Flash Preview",
      "cost_per_1m_in": 0.7,
      "cost_per_1m_out": 3.75,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "gemini-3-1-pro-preview",
      "name": "Gemini 3.1 Pro Preview",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "google-gemma-3-27b-it",
      "name": "Google Gemma 3 27B Instruct",
      "cost_per_1m_in": 0.12,
      "cost_per_1m_out": 0.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 198000,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "grok-41-fast",
      "name": "Grok 4.1 Fast",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 0.625,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "grok-4-20-beta",
      "name": "Grok 4.20 Beta",
      "cost_per_1m_in": 2.5,
      "cost_per_1m_out": 7.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 2000000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "grok-code-fast-1",
      "name": "Grok Code Fast 1",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1.87,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "kimi-k2-thinking",
      "name": "Kimi K2 Thinking",
      "cost_per_1m_in": 0.75,
      "cost_per_1m_out": 3.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "kimi-k2-5",
      "name": "Kimi K2.5",
      "cost_per_1m_in": 0.56,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "llama-3.2-3b",
      "name": "Llama 3.2 3B",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "llama-3.3-70b",
      "name": "Llama 3.3 70B",
      "cost_per_1m_in": 0.7,
      "cost_per_1m_out": 2.8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "minimax-m21",
      "name": "MiniMax M2.1",
      "cost_per_1m_in": 0.35,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 198000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "minimax-m25",
      "name": "MiniMax M2.5",
      "cost_per_1m_in": 0.34,
      "cost_per_1m_out": 1.19,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 198000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "minimax-m27",
      "name": "MiniMax M2.7",
      "cost_per_1m_in": 0.375,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 198000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "mistral-small-3-2-24b-instruct",
      "name": "Mistral Small 3.2 24B Instruct",
      "cost_per_1m_in": 0.09375,
      "cost_per_1m_out": 0.25,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "nvidia-nemotron-3-nano-30b-a3b",
      "name": "NVIDIA Nemotron 3 Nano 30B",
      "cost_per_1m_in": 0.075,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "openai-gpt-oss-120b",
      "name": "OpenAI GPT OSS 120B",
      "cost_per_1m_in": 0.07,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "qwen3-235b-a22b-instruct-2507",
      "name": "Qwen 3 235B A22B Instruct 2507",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.75,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "qwen3-235b-a22b-thinking-2507",
      "name": "Qwen 3 235B A22B Thinking 2507",
      "cost_per_1m_in": 0.45,
      "cost_per_1m_out": 3.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "qwen3-coder-480b-a35b-instruct-turbo",
      "name": "Qwen 3 Coder 480B Turbo",
      "cost_per_1m_in": 0.35,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "qwen3-coder-480b-a35b-instruct",
      "name": "Qwen 3 Coder 480B",
      "cost_per_1m_in": 0.75,
      "cost_per_1m_out": 3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "qwen3-next-80b",
      "name": "Qwen 3 Next 80B",
      "cost_per_1m_in": 0.35,
      "cost_per_1m_out": 1.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": false,
      "options": {}
    },
    {
      "id": "qwen3-5-35b-a3b",
      "name": "Qwen 3.5 35B A3B",
      "cost_per_1m_in": 0.3125,
      "cost_per_1m_out": 1.25,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {
        "temperature": 1,
        "top_p": 0.95
      }
    },
    {
      "id": "qwen3-5-9b",
      "name": "Qwen 3.5 9B",
      "cost_per_1m_in": 0.05,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "qwen3-vl-235b-a22b",
      "name": "Qwen 3 VL 235B",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 32768,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "mistral-31-24b",
      "name": "Venice Medium",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "venice-uncensored-role-play",
      "name": "Venice Role Play Uncensored",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 32000,
      "can_reason": false,
      "supports_attachments": true,
      "options": {}
    },
    {
      "id": "qwen3-4b",
      "name": "Venice Small",
      "cost_per_1m_in": 0.05,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32000,
      "default_max_tokens": 8000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false,
      "options": {}
    }
  ]
}