@@ -0,0 +1,2047 @@
+{
+ "name": "Vercel",
+ "id": "vercel",
+ "api_key": "$VERCEL_API_KEY",
+ "api_endpoint": "https://ai-gateway.vercel.sh/v1",
+ "type": "openai-compat",
+ "default_large_model_id": "anthropic/claude-sonnet-4",
+ "default_small_model_id": "anthropic/claude-haiku-4.5",
+ "models": [
+ {
+ "id": "anthropic/claude-3-haiku",
+ "name": "Claude 3 Haiku",
+ "cost_per_1m_in": 0.25,
+ "cost_per_1m_out": 1.25,
+ "cost_per_1m_in_cached": 0.03,
+ "cost_per_1m_out_cached": 0.3,
+ "context_window": 200000,
+ "default_max_tokens": 4096,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-3-opus",
+ "name": "Claude 3 Opus",
+ "cost_per_1m_in": 15,
+ "cost_per_1m_out": 75,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-3.5-haiku",
+ "name": "Claude 3.5 Haiku",
+ "cost_per_1m_in": 0.7999999999999999,
+ "cost_per_1m_out": 4,
+ "cost_per_1m_in_cached": 0.08,
+ "cost_per_1m_out_cached": 1,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-3.5-sonnet",
+ "name": "Claude 3.5 Sonnet",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0.3,
+ "cost_per_1m_out_cached": 3.75,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-3.5-sonnet-20240620",
+ "name": "Claude 3.5 Sonnet (2024-06-20)",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-3.7-sonnet",
+ "name": "Claude 3.7 Sonnet",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0.3,
+ "cost_per_1m_out_cached": 3.75,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-haiku-4.5",
+ "name": "Claude Haiku 4.5",
+ "cost_per_1m_in": 1,
+ "cost_per_1m_out": 5,
+ "cost_per_1m_in_cached": 0.09999999999999999,
+ "cost_per_1m_out_cached": 1.25,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-opus-4",
+ "name": "Claude Opus 4",
+ "cost_per_1m_in": 15,
+ "cost_per_1m_out": 75,
+ "cost_per_1m_in_cached": 1.5,
+ "cost_per_1m_out_cached": 18.75,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-opus-4.1",
+ "name": "Claude Opus 4.1",
+ "cost_per_1m_in": 15,
+ "cost_per_1m_out": 75,
+ "cost_per_1m_in_cached": 1.5,
+ "cost_per_1m_out_cached": 18.75,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-opus-4.5",
+ "name": "Claude Opus 4.5",
+ "cost_per_1m_in": 5,
+ "cost_per_1m_out": 25,
+ "cost_per_1m_in_cached": 0.5,
+ "cost_per_1m_out_cached": 6.25,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-sonnet-4",
+ "name": "Claude Sonnet 4",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0.3,
+ "cost_per_1m_out_cached": 3.75,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "anthropic/claude-sonnet-4.5",
+ "name": "Claude Sonnet 4.5",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0.3,
+ "cost_per_1m_out_cached": 3.75,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/codex-mini",
+ "name": "Codex Mini",
+ "cost_per_1m_in": 1.5,
+ "cost_per_1m_out": 6,
+ "cost_per_1m_in_cached": 0.375,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "cohere/command-a",
+ "name": "Command A",
+ "cost_per_1m_in": 2.5,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "deepseek/deepseek-v3",
+ "name": "DeepSeek V3 0324",
+ "cost_per_1m_in": 0.77,
+ "cost_per_1m_out": 0.77,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 163840,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "deepseek/deepseek-v3.1-terminus",
+ "name": "DeepSeek V3.1 Terminus",
+ "cost_per_1m_in": 0.27,
+ "cost_per_1m_out": 1,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "deepseek/deepseek-v3.2-exp",
+ "name": "DeepSeek V3.2 Exp",
+ "cost_per_1m_in": 0.27,
+ "cost_per_1m_out": 0.39999999999999997,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 163840,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "deepseek/deepseek-v3.2-thinking",
+ "name": "DeepSeek V3.2 Thinking",
+ "cost_per_1m_in": 0.28,
+ "cost_per_1m_out": 0.42,
+ "cost_per_1m_in_cached": 0.028,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "deepseek/deepseek-v3.1",
+ "name": "DeepSeek-V3.1",
+ "cost_per_1m_in": 0.3,
+ "cost_per_1m_out": 1,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 163840,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral/devstral-2",
+ "name": "Devstral 2",
+ "cost_per_1m_in": 0,
+ "cost_per_1m_out": 0,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral/devstral-small",
+ "name": "Devstral Small 1.1",
+ "cost_per_1m_in": 0.09999999999999999,
+ "cost_per_1m_out": 0.3,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral/devstral-small-2",
+ "name": "Devstral Small 2",
+ "cost_per_1m_in": 0,
+ "cost_per_1m_out": 0,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "zai/glm-4.5-air",
+ "name": "GLM 4.5 Air",
+ "cost_per_1m_in": 0.19999999999999998,
+ "cost_per_1m_out": 1.1,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "zai/glm-4.5v",
+ "name": "GLM 4.5V",
+ "cost_per_1m_in": 0.6,
+ "cost_per_1m_out": 1.7999999999999998,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 65536,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "zai/glm-4.6",
+ "name": "GLM 4.6",
+ "cost_per_1m_in": 0.44999999999999996,
+ "cost_per_1m_out": 1.7999999999999998,
+ "cost_per_1m_in_cached": 0.11,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "zai/glm-4.7",
+ "name": "GLM 4.7",
+ "cost_per_1m_in": 0.43,
+ "cost_per_1m_out": 1.75,
+ "cost_per_1m_in_cached": 0.08,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 202752,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "zai/glm-4.5",
+ "name": "GLM-4.5",
+ "cost_per_1m_in": 0.6,
+ "cost_per_1m_out": 2.2,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "zai/glm-4.6v",
+ "name": "GLM-4.6V",
+ "cost_per_1m_in": 0.3,
+ "cost_per_1m_out": 0.8999999999999999,
+ "cost_per_1m_in_cached": 0.049999999999999996,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "zai/glm-4.6v-flash",
+ "name": "GLM-4.6V-Flash",
+ "cost_per_1m_in": 0,
+ "cost_per_1m_out": 0,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5.1-codex-max",
+ "name": "GPT 5.1 Codex Max",
+ "cost_per_1m_in": 1.25,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0.125,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5.1-thinking",
+ "name": "GPT 5.1 Thinking",
+ "cost_per_1m_in": 1.25,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0.125,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5.2-pro",
+ "name": "GPT 5.2 ",
+ "cost_per_1m_in": 21,
+ "cost_per_1m_out": 168,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-4-turbo",
+ "name": "GPT-4 Turbo",
+ "cost_per_1m_in": 10,
+ "cost_per_1m_out": 30,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 4096,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-4.1",
+ "name": "GPT-4.1",
+ "cost_per_1m_in": 2,
+ "cost_per_1m_out": 8,
+ "cost_per_1m_in_cached": 0.5,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1047576,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-4.1-mini",
+ "name": "GPT-4.1 mini",
+ "cost_per_1m_in": 0.39999999999999997,
+ "cost_per_1m_out": 1.5999999999999999,
+ "cost_per_1m_in_cached": 0.09999999999999999,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1047576,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-4.1-nano",
+ "name": "GPT-4.1 nano",
+ "cost_per_1m_in": 0.09999999999999999,
+ "cost_per_1m_out": 0.39999999999999997,
+ "cost_per_1m_in_cached": 0.024999999999999998,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1047576,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-4o",
+ "name": "GPT-4o",
+ "cost_per_1m_in": 2.5,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 1.25,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-4o-mini",
+ "name": "GPT-4o mini",
+ "cost_per_1m_in": 0.15,
+ "cost_per_1m_out": 0.6,
+ "cost_per_1m_in_cached": 0.075,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5",
+ "name": "GPT-5",
+ "cost_per_1m_in": 1.25,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0.13,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5-chat",
+ "name": "GPT-5 Chat",
+ "cost_per_1m_in": 1.25,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0.125,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5-mini",
+ "name": "GPT-5 mini",
+ "cost_per_1m_in": 0.25,
+ "cost_per_1m_out": 2,
+ "cost_per_1m_in_cached": 0.03,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5-nano",
+ "name": "GPT-5 nano",
+ "cost_per_1m_in": 0.049999999999999996,
+ "cost_per_1m_out": 0.39999999999999997,
+ "cost_per_1m_in_cached": 0.01,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5-pro",
+ "name": "GPT-5 pro",
+ "cost_per_1m_in": 15,
+ "cost_per_1m_out": 120,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5-codex",
+ "name": "GPT-5-Codex",
+ "cost_per_1m_in": 1.25,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0.13,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5.1-codex-mini",
+ "name": "GPT-5.1 Codex mini",
+ "cost_per_1m_in": 0.25,
+ "cost_per_1m_out": 2,
+ "cost_per_1m_in_cached": 0.024999999999999998,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5.1-instant",
+ "name": "GPT-5.1 Instant",
+ "cost_per_1m_in": 1.25,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0.125,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5.1-codex",
+ "name": "GPT-5.1-Codex",
+ "cost_per_1m_in": 1.25,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0.125,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5.2",
+ "name": "GPT-5.2",
+ "cost_per_1m_in": 1.75,
+ "cost_per_1m_out": 14,
+ "cost_per_1m_in_cached": 0.175,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5.2-chat",
+ "name": "GPT-5.2 Chat",
+ "cost_per_1m_in": 1.75,
+ "cost_per_1m_out": 14,
+ "cost_per_1m_in_cached": 0.175,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-5.2-codex",
+ "name": "GPT-5.2-Codex",
+ "cost_per_1m_in": 1.75,
+ "cost_per_1m_out": 14,
+ "cost_per_1m_in_cached": 0.175,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "google/gemini-2.0-flash",
+ "name": "Gemini 2.0 Flash",
+ "cost_per_1m_in": 0.09999999999999999,
+ "cost_per_1m_out": 0.39999999999999997,
+ "cost_per_1m_in_cached": 0.024999999999999998,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1000000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "google/gemini-2.0-flash-lite",
+ "name": "Gemini 2.0 Flash Lite",
+ "cost_per_1m_in": 0.075,
+ "cost_per_1m_out": 0.3,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1048576,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "google/gemini-2.5-flash",
+ "name": "Gemini 2.5 Flash",
+ "cost_per_1m_in": 0.3,
+ "cost_per_1m_out": 2.5,
+ "cost_per_1m_in_cached": 0.03,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1000000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "google/gemini-2.5-flash-lite",
+ "name": "Gemini 2.5 Flash Lite",
+ "cost_per_1m_in": 0.09999999999999999,
+ "cost_per_1m_out": 0.39999999999999997,
+ "cost_per_1m_in_cached": 0.01,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1048576,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "google/gemini-2.5-flash-lite-preview-09-2025",
+ "name": "Gemini 2.5 Flash Lite Preview 09-2025",
+ "cost_per_1m_in": 0.09999999999999999,
+ "cost_per_1m_out": 0.39999999999999997,
+ "cost_per_1m_in_cached": 0.01,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1048576,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "google/gemini-2.5-flash-preview-09-2025",
+ "name": "Gemini 2.5 Flash Preview 09-2025",
+ "cost_per_1m_in": 0.3,
+ "cost_per_1m_out": 2.5,
+ "cost_per_1m_in_cached": 0.03,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1000000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "google/gemini-2.5-pro",
+ "name": "Gemini 2.5 Pro",
+ "cost_per_1m_in": 1.25,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0.125,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1048576,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "google/gemini-3-flash",
+ "name": "Gemini 3 Flash",
+ "cost_per_1m_in": 0.5,
+ "cost_per_1m_out": 3,
+ "cost_per_1m_in_cached": 0.049999999999999996,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1000000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "google/gemini-3-pro-preview",
+ "name": "Gemini 3 Pro Preview",
+ "cost_per_1m_in": 2,
+ "cost_per_1m_out": 12,
+ "cost_per_1m_in_cached": 0.19999999999999998,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1000000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-2-vision",
+ "name": "Grok 2 Vision",
+ "cost_per_1m_in": 2,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 32768,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-3",
+ "name": "Grok 3 Beta",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-3-fast",
+ "name": "Grok 3 Fast Beta",
+ "cost_per_1m_in": 5,
+ "cost_per_1m_out": 25,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-3-mini",
+ "name": "Grok 3 Mini Beta",
+ "cost_per_1m_in": 0.3,
+ "cost_per_1m_out": 0.5,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-3-mini-fast",
+ "name": "Grok 3 Mini Fast Beta",
+ "cost_per_1m_in": 0.6,
+ "cost_per_1m_out": 4,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-4",
+ "name": "Grok 4",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-4-fast-non-reasoning",
+ "name": "Grok 4 Fast Non-Reasoning",
+ "cost_per_1m_in": 0.19999999999999998,
+ "cost_per_1m_out": 0.5,
+ "cost_per_1m_in_cached": 0.049999999999999996,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 2000000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-4-fast-reasoning",
+ "name": "Grok 4 Fast Reasoning",
+ "cost_per_1m_in": 0.19999999999999998,
+ "cost_per_1m_out": 0.5,
+ "cost_per_1m_in_cached": 0.049999999999999996,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 2000000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-4.1-fast-non-reasoning",
+ "name": "Grok 4.1 Fast Non-Reasoning",
+ "cost_per_1m_in": 0.19999999999999998,
+ "cost_per_1m_out": 0.5,
+ "cost_per_1m_in_cached": 0.049999999999999996,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 2000000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-4.1-fast-reasoning",
+ "name": "Grok 4.1 Fast Reasoning",
+ "cost_per_1m_in": 0.19999999999999998,
+ "cost_per_1m_out": 0.5,
+ "cost_per_1m_in_cached": 0.049999999999999996,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 2000000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "xai/grok-code-fast-1",
+ "name": "Grok Code Fast 1",
+ "cost_per_1m_in": 0.19999999999999998,
+ "cost_per_1m_out": 1.5,
+ "cost_per_1m_in_cached": 0.02,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "prime-intellect/intellect-3",
+ "name": "INTELLECT 3",
+ "cost_per_1m_in": 0.19999999999999998,
+ "cost_per_1m_out": 1.1,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "moonshotai/kimi-k2",
+ "name": "Kimi K2",
+ "cost_per_1m_in": 0.5,
+ "cost_per_1m_out": 2,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "moonshotai/kimi-k2-thinking",
+ "name": "Kimi K2 Thinking",
+ "cost_per_1m_in": 0.47,
+ "cost_per_1m_out": 2,
+ "cost_per_1m_in_cached": 0.14100000000000001,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 216144,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "moonshotai/kimi-k2-thinking-turbo",
+ "name": "Kimi K2 Thinking Turbo",
+ "cost_per_1m_in": 1.15,
+ "cost_per_1m_out": 8,
+ "cost_per_1m_in_cached": 0.15,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262114,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "moonshotai/kimi-k2-turbo",
+ "name": "Kimi K2 Turbo",
+ "cost_per_1m_in": 2.4,
+ "cost_per_1m_out": 10,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "meta/llama-3.1-70b",
+ "name": "Llama 3.1 70B Instruct",
+ "cost_per_1m_in": 0.39999999999999997,
+ "cost_per_1m_out": 0.39999999999999997,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "meta/llama-3.1-8b",
+ "name": "Llama 3.1 8B Instruct",
+ "cost_per_1m_in": 0.03,
+ "cost_per_1m_out": 0.049999999999999996,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "meta/llama-3.2-11b",
+ "name": "Llama 3.2 11B Vision Instruct",
+ "cost_per_1m_in": 0.16,
+ "cost_per_1m_out": 0.16,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "meta/llama-3.2-90b",
+ "name": "Llama 3.2 90B Vision Instruct",
+ "cost_per_1m_in": 0.72,
+ "cost_per_1m_out": 0.72,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "meta/llama-3.3-70b",
+ "name": "Llama 3.3 70B Instruct",
+ "cost_per_1m_in": 0.72,
+ "cost_per_1m_out": 0.72,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "meta/llama-4-maverick",
+ "name": "Llama 4 Maverick 17B Instruct",
+ "cost_per_1m_in": 0.15,
+ "cost_per_1m_out": 0.6,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "meta/llama-4-scout",
+ "name": "Llama 4 Scout 17B Instruct",
+ "cost_per_1m_in": 0.08,
+ "cost_per_1m_out": 0.3,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "meituan/longcat-flash-chat",
+ "name": "LongCat Flash Chat",
+ "cost_per_1m_in": 0,
+ "cost_per_1m_out": 0,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "meituan/longcat-flash-thinking",
+ "name": "LongCat Flash Thinking",
+ "cost_per_1m_in": 0.15,
+ "cost_per_1m_out": 1.5,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "inception/mercury-coder-small",
+ "name": "Mercury Coder Small Beta",
+ "cost_per_1m_in": 0.25,
+ "cost_per_1m_out": 1,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 32000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "xiaomi/mimo-v2-flash",
+ "name": "MiMo V2 Flash",
+ "cost_per_1m_in": 0.098,
+ "cost_per_1m_out": 0.293,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262144,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "minimax/minimax-m2",
+ "name": "MiniMax M2",
+ "cost_per_1m_in": 0.27,
+ "cost_per_1m_out": 1.15,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262114,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "minimax/minimax-m2.1",
+ "name": "MiniMax M2.1",
+ "cost_per_1m_in": 0.28,
+ "cost_per_1m_out": 1.2,
+ "cost_per_1m_in_cached": 0.14,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 196608,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "minimax/minimax-m2.1-lightning",
+ "name": "MiniMax M2.1 Lightning",
+ "cost_per_1m_in": 0.3,
+ "cost_per_1m_out": 2.4,
+ "cost_per_1m_in_cached": 0.03,
+ "cost_per_1m_out_cached": 0.375,
+ "context_window": 204800,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral/ministral-3b",
+ "name": "Ministral 3B",
+ "cost_per_1m_in": 0.04,
+ "cost_per_1m_out": 0.04,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 4000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral/ministral-8b",
+ "name": "Ministral 8B",
+ "cost_per_1m_in": 0.09999999999999999,
+ "cost_per_1m_out": 0.09999999999999999,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 4000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral/codestral",
+ "name": "Mistral Codestral",
+ "cost_per_1m_in": 0.3,
+ "cost_per_1m_out": 0.8999999999999999,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 4000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral/mistral-medium",
+ "name": "Mistral Medium 3.1",
+ "cost_per_1m_in": 0.39999999999999997,
+ "cost_per_1m_out": 2,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "mistral/mistral-small",
+ "name": "Mistral Small",
+ "cost_per_1m_in": 0.09999999999999999,
+ "cost_per_1m_out": 0.3,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 32000,
+ "default_max_tokens": 4000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "nvidia/nemotron-nano-12b-v2-vl",
+ "name": "Nvidia Nemotron Nano 12B V2 VL",
+ "cost_per_1m_in": 0.19999999999999998,
+ "cost_per_1m_out": 0.6,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "nvidia/nemotron-nano-9b-v2",
+ "name": "Nvidia Nemotron Nano 9B V2",
+ "cost_per_1m_in": 0.04,
+ "cost_per_1m_out": 0.16,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral/pixtral-12b",
+ "name": "Pixtral 12B 2409",
+ "cost_per_1m_in": 0.15,
+ "cost_per_1m_out": 0.15,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 4000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "mistral/pixtral-large",
+ "name": "Pixtral Large",
+ "cost_per_1m_in": 2,
+ "cost_per_1m_out": 6,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 4000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "alibaba/qwen3-coder-30b-a3b",
+ "name": "Qwen 3 Coder 30B A3B Instruct",
+ "cost_per_1m_in": 0.07,
+ "cost_per_1m_out": 0.27,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 160000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "alibaba/qwen-3-32b",
+ "name": "Qwen 3.32B",
+ "cost_per_1m_in": 0.09999999999999999,
+ "cost_per_1m_out": 0.3,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 40960,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "alibaba/qwen3-235b-a22b-thinking",
+ "name": "Qwen3 235B A22B Thinking 2507",
+ "cost_per_1m_in": 0.3,
+ "cost_per_1m_out": 2.9000000000000004,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262114,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "alibaba/qwen-3-235b",
+ "name": "Qwen3 235B A22b Instruct 2507",
+ "cost_per_1m_in": 0.071,
+ "cost_per_1m_out": 0.463,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 40960,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "alibaba/qwen3-coder",
+ "name": "Qwen3 Coder 480B A35B Instruct",
+ "cost_per_1m_in": 0.38,
+ "cost_per_1m_out": 1.53,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262144,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "alibaba/qwen3-coder-plus",
+ "name": "Qwen3 Coder Plus",
+ "cost_per_1m_in": 1,
+ "cost_per_1m_out": 5,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1000000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "alibaba/qwen3-max",
+ "name": "Qwen3 Max",
+ "cost_per_1m_in": 1.2,
+ "cost_per_1m_out": 6,
+ "cost_per_1m_in_cached": 0.24,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262144,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "alibaba/qwen3-max-preview",
+ "name": "Qwen3 Max Preview",
+ "cost_per_1m_in": 1.2,
+ "cost_per_1m_out": 6,
+ "cost_per_1m_in_cached": 0.24,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262144,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "alibaba/qwen-3-14b",
+ "name": "Qwen3-14B",
+ "cost_per_1m_in": 0.06,
+ "cost_per_1m_out": 0.24,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 40960,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "alibaba/qwen-3-30b",
+ "name": "Qwen3-30B-A3B",
+ "cost_per_1m_in": 0.08,
+ "cost_per_1m_out": 0.29,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 40960,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "bytedance/seed-1.6",
+ "name": "Seed 1.6",
+ "cost_per_1m_in": 0.25,
+ "cost_per_1m_out": 2,
+ "cost_per_1m_in_cached": 0.049999999999999996,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "perplexity/sonar",
+ "name": "Sonar",
+ "cost_per_1m_in": 1,
+ "cost_per_1m_out": 1,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 127000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "perplexity/sonar-pro",
+ "name": "Sonar Pro",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "stealth/sonoma-dusk-alpha",
+ "name": "Sonoma Dusk Alpha",
+ "cost_per_1m_in": 0.19999999999999998,
+ "cost_per_1m_out": 0.5,
+ "cost_per_1m_in_cached": 0.049999999999999996,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 2000000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "stealth/sonoma-sky-alpha",
+ "name": "Sonoma Sky Alpha",
+ "cost_per_1m_in": 0.19999999999999998,
+ "cost_per_1m_out": 0.5,
+ "cost_per_1m_in_cached": 0.049999999999999996,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 2000000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-oss-120b",
+ "name": "gpt-oss-120b",
+ "cost_per_1m_in": 0.09999999999999999,
+ "cost_per_1m_out": 0.5,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-oss-20b",
+ "name": "gpt-oss-20b",
+ "cost_per_1m_in": 0.07,
+ "cost_per_1m_out": 0.3,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-oss-safeguard-20b",
+ "name": "gpt-oss-safeguard-20b",
+ "cost_per_1m_in": 0.075,
+ "cost_per_1m_out": 0.3,
+ "cost_per_1m_in_cached": 0.037,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "openai/o1",
+ "name": "o1",
+ "cost_per_1m_in": 15,
+ "cost_per_1m_out": 60,
+ "cost_per_1m_in_cached": 7.5,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/o3",
+ "name": "o3",
+ "cost_per_1m_in": 2,
+ "cost_per_1m_out": 8,
+ "cost_per_1m_in_cached": 0.5,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/o3-pro",
+ "name": "o3 Pro",
+ "cost_per_1m_in": 20,
+ "cost_per_1m_out": 80,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/o3-deep-research",
+ "name": "o3-deep-research",
+ "cost_per_1m_in": 10,
+ "cost_per_1m_out": 40,
+ "cost_per_1m_in_cached": 2.5,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "openai/o3-mini",
+ "name": "o3-mini",
+ "cost_per_1m_in": 1.1,
+ "cost_per_1m_out": 4.4,
+ "cost_per_1m_in_cached": 0.55,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "openai/o4-mini",
+ "name": "o4-mini",
+ "cost_per_1m_in": 1.1,
+ "cost_per_1m_out": 4.4,
+ "cost_per_1m_in_cached": 0.275,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 8000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "vercel/v0-1.0-md",
+ "name": "v0-1.0-md",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "vercel/v0-1.5-md",
+ "name": "v0-1.5-md",
+ "cost_per_1m_in": 3,
+ "cost_per_1m_out": 15,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 8000,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ }
+ ],
+ "default_headers": {
+ "HTTP-Referer": "https://charm.land",
+ "X-Title": "Crush"
+ }
+}