@@ -0,0 +1,1564 @@
+{
+ "name": "Cortecs",
+ "id": "cortecs",
+ "api_key": "$CORTECS_API_KEY",
+ "api_endpoint": "https://api.cortecs.ai/v1",
+ "type": "openai",
+ "default_large_model_id": "qwen3-coder-30b-a3b-instruct",
+ "default_small_model_id": "glm-4.7-flash",
+ "models": [
+ {
+ "id": "glm-5",
+ "name": "GLM 5",
+ "cost_per_1m_in": 0.932,
+ "cost_per_1m_out": 2.982,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 202752,
+ "default_max_tokens": 20275,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "glm-4.6",
+ "name": "GLM 4.6",
+ "cost_per_1m_in": 0.373,
+ "cost_per_1m_out": 1.631,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 203000,
+ "default_max_tokens": 20300,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "deepseek-chat-v3.1",
+ "name": "DeepSeek Chat V3.1",
+ "cost_per_1m_in": 0.186,
+ "cost_per_1m_out": 0.745,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 164000,
+ "default_max_tokens": 16400,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "qwen-2.5-72b-instruct",
+ "name": "Qwen2.5 72B Instruct",
+ "cost_per_1m_in": 0.065,
+ "cost_per_1m_out": 0.242,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 33000,
+ "default_max_tokens": 3300,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "qwen3.5-397b-a17b",
+ "name": "Qwen3.5 397B A17B",
+ "cost_per_1m_in": 0.559,
+ "cost_per_1m_out": 3.354,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262000,
+ "default_max_tokens": 25000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "deepseek-v3.2",
+ "name": "DeepSeek V3.2",
+ "cost_per_1m_in": 0.28,
+ "cost_per_1m_out": 0.466,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 163840,
+ "default_max_tokens": 16384,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral-small-2603",
+ "name": "Mistral Small 4 2603",
+ "cost_per_1m_in": 0.134,
+ "cost_per_1m_out": 0.536,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 25600,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "minimax-m2.5",
+ "name": "MiniMax M2.5",
+ "cost_per_1m_in": 0.28,
+ "cost_per_1m_out": 1.025,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 196608,
+ "default_max_tokens": 6553,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "claude-4-6-sonnet",
+ "name": "Claude Sonnet 4.6",
+ "cost_per_1m_in": 3.099,
+ "cost_per_1m_out": 15.495,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1000000,
+ "default_max_tokens": 100000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "glm-4.7-flash",
+ "name": "GLM 4.7 Flash",
+ "cost_per_1m_in": 0.075,
+ "cost_per_1m_out": 0.451,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 203000,
+ "default_max_tokens": 20300,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "kimi-k2.5",
+ "name": "Kimi K2.5",
+ "cost_per_1m_in": 0.466,
+ "cost_per_1m_out": 2.236,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 25600,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "claude-opus4-6",
+ "name": "Claude Opus 4.6",
+ "cost_per_1m_in": 5.165,
+ "cost_per_1m_out": 25.826,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1000000,
+ "default_max_tokens": 100000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "minimax-m2",
+ "name": "MiniMax M2",
+ "cost_per_1m_in": 0.233,
+ "cost_per_1m_out": 0.932,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 196608,
+ "default_max_tokens": 19660,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "glm-4.7",
+ "name": "GLM 4.7",
+ "cost_per_1m_in": 0.376,
+ "cost_per_1m_out": 1.878,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 20000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "minimax-m2.1",
+ "name": "MiniMax M2.1",
+ "cost_per_1m_in": 0.282,
+ "cost_per_1m_out": 1.127,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 196000,
+ "default_max_tokens": 19600,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "llama-guard-3-8b",
+ "name": "Llama Guard 3 8B",
+ "cost_per_1m_in": 0.019,
+ "cost_per_1m_out": 0.056,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "qwen3-vl-235b-a22b",
+ "name": "Qwen3 VL 235B A22B",
+ "cost_per_1m_in": 0.196,
+ "cost_per_1m_out": 1.77,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131000,
+ "default_max_tokens": 13100,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "mistral-small-creative",
+ "name": "Mistral Small Creative",
+ "cost_per_1m_in": 0.105,
+ "cost_per_1m_out": 0.315,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 32000,
+ "default_max_tokens": 3200,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "nvidia-nemotron-3-nano-30b-a3b",
+ "name": "Nemotron 3 Nano 30B A3B",
+ "cost_per_1m_in": 0.056,
+ "cost_per_1m_out": 0.226,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "claude-opus4-5",
+ "name": "Claude Opus 4.5",
+ "cost_per_1m_in": 5.165,
+ "cost_per_1m_out": 25.826,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 20000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "qwen3-next-80b-a3b-thinking",
+ "name": "Qwen3 Next 80B A3B Thinking",
+ "cost_per_1m_in": 0.14,
+ "cost_per_1m_out": 1.118,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "holo2-30b-a3b",
+ "name": "Holo2 30B A3B",
+ "cost_per_1m_in": 0.315,
+ "cost_per_1m_out": 0.735,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 22000,
+ "default_max_tokens": 2200,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "devstral-2512",
+ "name": "Devstral 2 2512",
+ "cost_per_1m_in": 0.42,
+ "cost_per_1m_out": 2.1,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262000,
+ "default_max_tokens": 20000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "nova-2-lite",
+ "name": "Nova 2 Lite",
+ "cost_per_1m_in": 0.352,
+ "cost_per_1m_out": 2.963,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1000000,
+ "default_max_tokens": 100000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "gpt-oss-safeguard-120b",
+ "name": "GPT OSS Safeguard 120B",
+ "cost_per_1m_in": 0.169,
+ "cost_per_1m_out": 0.657,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral-large-2512",
+ "name": "Mistral Large 3 2512",
+ "cost_per_1m_in": 0.525,
+ "cost_per_1m_out": 1.575,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 25600,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "ministral-8b-2512",
+ "name": "Ministral 3 8B 2512",
+ "cost_per_1m_in": 0.158,
+ "cost_per_1m_out": 0.158,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 25600,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "ministral-3b-2512",
+ "name": "Ministral 3 3B 2512",
+ "cost_per_1m_in": 0.105,
+ "cost_per_1m_out": 0.105,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 25600,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "ministral-14b-2512",
+ "name": "Ministral 3 14B 2512",
+ "cost_per_1m_in": 0.21,
+ "cost_per_1m_out": 0.21,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 25600,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "kimi-k2-thinking",
+ "name": "Kimi K2 Thinking",
+ "cost_per_1m_in": 0.564,
+ "cost_per_1m_out": 2.348,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262000,
+ "default_max_tokens": 26200,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "intellect-3",
+ "name": "INTELLECT-3",
+ "cost_per_1m_in": 0.188,
+ "cost_per_1m_out": 1.033,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "gpt-5.1",
+ "name": "GPT 5.1",
+ "cost_per_1m_in": 1.296,
+ "cost_per_1m_out": 10.33,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 40000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "nemotron-nano-v2-12b",
+ "name": "Nemotron Nano V2 12B",
+ "cost_per_1m_in": 0.066,
+ "cost_per_1m_out": 0.188,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "claude-haiku-4-5",
+ "name": "Claude Haiku 4.5",
+ "cost_per_1m_in": 0.939,
+ "cost_per_1m_out": 4.696,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 20000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "claude-4-5-sonnet",
+ "name": "Claude 4.5 Sonnet",
+ "cost_per_1m_in": 2.817,
+ "cost_per_1m_out": 14.087,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 20000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "magistral-medium-2509",
+ "name": "Magistral Medium 2509",
+ "cost_per_1m_in": 2.1,
+ "cost_per_1m_out": 5.25,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "magistral-small-2509",
+ "name": "Magistral Small 2509",
+ "cost_per_1m_in": 0.525,
+ "cost_per_1m_out": 1.575,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "hermes-4-70b",
+ "name": "Hermes 4 70B",
+ "cost_per_1m_in": 0.122,
+ "cost_per_1m_out": 0.376,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "gpt-5",
+ "name": "GPT 5",
+ "cost_per_1m_in": 1.296,
+ "cost_per_1m_out": 10.33,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 40000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "qwen3-235b-a22b-thinking-2507",
+ "name": "Qwen3 235B A22B Thinking 2507",
+ "cost_per_1m_in": 0.188,
+ "cost_per_1m_out": 0.752,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262000,
+ "default_max_tokens": 26200,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "gpt-oss-120b",
+ "name": "GPT OSS 120B",
+ "cost_per_1m_in": 0.037,
+ "cost_per_1m_out": 0.186,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "qwen3-30b-a3b-instruct-2507",
+ "name": "Qwen3 30B A3B Instruct 2507",
+ "cost_per_1m_in": 0.093,
+ "cost_per_1m_out": 0.281,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262000,
+ "default_max_tokens": 26200,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "qwen3-30b-a3b-thinking-2507",
+ "name": "Qwen3 30B A3B Thinking 2507",
+ "cost_per_1m_in": 0.093,
+ "cost_per_1m_out": 0.281,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262000,
+ "default_max_tokens": 26200,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "qwen3-coder-480b-a35b-instruct",
+ "name": "Qwen3 Coder 480B A35B Instruct",
+ "cost_per_1m_in": 0.376,
+ "cost_per_1m_out": 1.691,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262000,
+ "default_max_tokens": 26200,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "gpt-oss-20b",
+ "name": "GPT OSS 20B",
+ "cost_per_1m_in": 0.028,
+ "cost_per_1m_out": 0.13,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "kimi-k2-instruct",
+ "name": "Kimi K2 Instruct",
+ "cost_per_1m_in": 0.469,
+ "cost_per_1m_out": 2.254,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131000,
+ "default_max_tokens": 13100,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "glm-4.5",
+ "name": "GLM 4.5",
+ "cost_per_1m_in": 0.564,
+ "cost_per_1m_out": 2.066,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "glm-4.5-air",
+ "name": "GLM 4.5 Air",
+ "cost_per_1m_in": 0.188,
+ "cost_per_1m_out": 1.127,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral-7b-instruct-v0.3",
+ "name": "Mistral 7B Instruct v0.3",
+ "cost_per_1m_in": 0.105,
+ "cost_per_1m_out": 0.105,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 127000,
+ "default_max_tokens": 12700,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral-large-2402",
+ "name": "Mistral Large 2402",
+ "cost_per_1m_in": 4.038,
+ "cost_per_1m_out": 12.208,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 32000,
+ "default_max_tokens": 3200,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "pixtral-large-2502",
+ "name": "Pixtral Large 25.02",
+ "cost_per_1m_in": 1.878,
+ "cost_per_1m_out": 5.634,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "mistral-small-3.2-24b-instruct-2506",
+ "name": "Mistral Small 3.2 24B Instruct 2506",
+ "cost_per_1m_in": 0.095,
+ "cost_per_1m_out": 0.294,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "qwen3-32b",
+ "name": "Qwen3 32B",
+ "cost_per_1m_in": 0.093,
+ "cost_per_1m_out": 0.281,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 40000,
+ "default_max_tokens": 1638,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "qwen3-235b-a22b-instruct-2507",
+ "name": "Qwen3 235B A22B Instruct 2507",
+ "cost_per_1m_in": 0.065,
+ "cost_per_1m_out": 0.429,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131000,
+ "default_max_tokens": 13100,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "qwen3-coder-30b-a3b-instruct",
+ "name": "Qwen3 Coder 30B A3B Instruct",
+ "cost_per_1m_in": 0.056,
+ "cost_per_1m_out": 0.233,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 262000,
+ "default_max_tokens": 3200,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "gpt-4.1",
+ "name": "GPT 4.1",
+ "cost_per_1m_in": 2.066,
+ "cost_per_1m_out": 8.266,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1047576,
+ "default_max_tokens": 104757,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "gpt-4.1-mini",
+ "name": "GPT 4.1 mini",
+ "cost_per_1m_in": 0.41,
+ "cost_per_1m_out": 1.607,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1047576,
+ "default_max_tokens": 104757,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "gpt-4.1-nano",
+ "name": "GPT 4.1 nano",
+ "cost_per_1m_in": 0.105,
+ "cost_per_1m_out": 0.41,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1047576,
+ "default_max_tokens": 104757,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "nova-micro-v1",
+ "name": "Nova Micro 1.0",
+ "cost_per_1m_in": 0.038,
+ "cost_per_1m_out": 0.15,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "nova-lite-v1",
+ "name": "Nova Lite 1.0",
+ "cost_per_1m_in": 0.065,
+ "cost_per_1m_out": 0.259,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 300000,
+ "default_max_tokens": 30000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "nova-pro-v1",
+ "name": "Nova Pro 1.0",
+ "cost_per_1m_in": 0.865,
+ "cost_per_1m_out": 3.46,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 300000,
+ "default_max_tokens": 30000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "claude-sonnet-4",
+ "name": "Claude Sonnet 4",
+ "cost_per_1m_in": 2.817,
+ "cost_per_1m_out": 14.087,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 20000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "claude-3-7-sonnet",
+ "name": "Claude 3.7 Sonnet",
+ "cost_per_1m_in": 2.817,
+ "cost_per_1m_out": 14.087,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 200000,
+ "default_max_tokens": 20000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "llama-3.1-nemotron-ultra-253b-v1",
+ "name": "Llama 3.1 Nemotron Ultra 253B v1",
+ "cost_per_1m_in": 0.564,
+ "cost_per_1m_out": 1.691,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "llama-4-maverick",
+ "name": "Llama 4 Maverick",
+ "cost_per_1m_in": 0.13,
+ "cost_per_1m_out": 0.633,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1050000,
+ "default_max_tokens": 105000,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "deepseek-v3-0324",
+ "name": "DeepSeek V3 0324",
+ "cost_per_1m_in": 0.28,
+ "cost_per_1m_out": 0.932,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 163840,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral-small-2503",
+ "name": "Mistral Small 2503",
+ "cost_per_1m_in": 0.105,
+ "cost_per_1m_out": 0.315,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "mistral-small-2506",
+ "name": "Mistral Small 2506",
+ "cost_per_1m_in": 0.105,
+ "cost_per_1m_out": 0.315,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 13107,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "gemini-2.0-flash-001",
+ "name": "Gemini 2.0 Flash",
+ "cost_per_1m_in": 0.141,
+ "cost_per_1m_out": 0.563,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1048576,
+ "default_max_tokens": 104857,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "gemini-2.0-flash-lite-001",
+ "name": "Gemini 2.0 Flash Lite",
+ "cost_per_1m_in": 0.07,
+ "cost_per_1m_out": 0.282,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1048576,
+ "default_max_tokens": 104857,
+ "can_reason": false,
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "gemini-2.5-flash",
+ "name": "Gemini 2.5 Flash",
+ "cost_per_1m_in": 0.282,
+ "cost_per_1m_out": 2.348,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1048576,
+ "default_max_tokens": 104857,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "gemini-2.5-pro",
+ "name": "Gemini 2.5 Pro",
+ "cost_per_1m_in": 1.409,
+ "cost_per_1m_out": 9.391,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 1048576,
+ "default_max_tokens": 104857,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "gemma-3-27b-it",
+ "name": "Gemma 3 27b it",
+ "cost_per_1m_in": 0.093,
+ "cost_per_1m_out": 0.281,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131000,
+ "default_max_tokens": 4000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "deepseek-r1-distill-llama-70b",
+ "name": "Deepseek R1 Distill LLama 70B",
+ "cost_per_1m_in": 0.704,
+ "cost_per_1m_out": 0.704,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131000,
+ "default_max_tokens": 1600,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "deepseek-r1-0528",
+ "name": "DeepSeek R1 0528",
+ "cost_per_1m_in": 0.752,
+ "cost_per_1m_out": 2.254,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 164000,
+ "default_max_tokens": 16400,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "codestral-2508",
+ "name": "Codestral 25.08",
+ "cost_per_1m_in": 0.315,
+ "cost_per_1m_out": 0.945,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 256000,
+ "default_max_tokens": 25600,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "llama-3.3-70b-instruct",
+ "name": "Llama 3.3 70B Instruct",
+ "cost_per_1m_in": 0.093,
+ "cost_per_1m_out": 0.289,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "gpt-4o",
+ "name": "GPT 4o",
+ "cost_per_1m_in": 2.506,
+ "cost_per_1m_out": 10.024,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "gpt-5-mini",
+ "name": "GPT 5 mini",
+ "cost_per_1m_in": 0.263,
+ "cost_per_1m_out": 2.066,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 40000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "gpt-5-nano",
+ "name": "GPT 5 nano",
+ "cost_per_1m_in": 0.057,
+ "cost_per_1m_out": 0.414,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 400000,
+ "default_max_tokens": 40000,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "mistral-large-2411",
+ "name": "Mistral Large 2411",
+ "cost_per_1m_in": 1.89,
+ "cost_per_1m_out": 5.67,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 13107,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "hermes-4-405b",
+ "name": "Hermes 4 405B",
+ "cost_per_1m_in": 0.939,
+ "cost_per_1m_out": 2.817,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral-nemo-instruct-2407",
+ "name": "Mistral Nemo 2407",
+ "cost_per_1m_in": 0.137,
+ "cost_per_1m_out": 0.137,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 11800,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "devstral-medium-2507",
+ "name": "Devstral Medium 2507",
+ "cost_per_1m_in": 0.42,
+ "cost_per_1m_out": 2.1,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 13107,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "devstral-small-2507",
+ "name": "Devstral Small 2507",
+ "cost_per_1m_in": 0.105,
+ "cost_per_1m_out": 0.315,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 131072,
+ "default_max_tokens": 13107,
+ "can_reason": false,
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "mistral-medium-2508",
+ "name": "Mistral Medium 2508",
+ "cost_per_1m_in": 0.42,
+ "cost_per_1m_out": 2.1,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "llama-3.1-405b-instruct",
+ "name": "Llama 3.1 405B Instruct",
+ "cost_per_1m_in": 1.838,
+ "cost_per_1m_out": 1.838,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "gpt-4o-mini",
+ "name": "GPT 4o mini",
+ "cost_per_1m_in": 0.15,
+ "cost_per_1m_out": 0.601,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": true,
+ "options": {}
+ },
+ {
+ "id": "llama-3.1-8b-instruct",
+ "name": "Llama 3.1 8B Instruct",
+ "cost_per_1m_in": 0.019,
+ "cost_per_1m_out": 0.057,
+ "cost_per_1m_in_cached": 0,
+ "cost_per_1m_out_cached": 0,
+ "context_window": 128000,
+ "default_max_tokens": 12800,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ }
+ ]
+}