{
  "name": "Cortecs",
  "id": "cortecs",
  "api_key": "$CORTECS_API_KEY",
  "api_endpoint": "https://api.cortecs.ai/v1",
  "type": "openai",
  "default_large_model_id": "qwen3-coder-30b-a3b-instruct",
  "default_small_model_id": "glm-4.7-flash",
  "models": [
    {
      "id": "deepseek-v4-pro",
      "name": "DeepSeek V4 Pro",
      "cost_per_1m_in": 1.55295,
      "cost_per_1m_out": 3.1059,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "deepseek-v4-flash",
      "name": "DeepSeek V4 Flash",
      "cost_per_1m_in": 0.13311,
      "cost_per_1m_out": 0.26622,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "mistral-medium-3.5",
      "name": "Mistral Medium 3.5",
      "cost_per_1m_in": 1.25,
      "cost_per_1m_out": 6.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "nvidia-nemotron-3-nano-omni",
      "name": "Nemotron 3 Nano Omni",
      "cost_per_1m_in": 0.0532447,
      "cost_per_1m_out": 0.212976,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 30000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "gpt-5.4",
      "name": "GPT 5.4",
      "cost_per_1m_in": 2.601,
      "cost_per_1m_out": 13.872,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1050000,
      "default_max_tokens": 105000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "kimi-k2.6",
      "name": "Kimi K2.6",
      "cost_per_1m_in": 0.6936,
      "cost_per_1m_out": 3.0345,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "claude-opus4-7",
      "name": "Claude Opus 4.7",
      "cost_per_1m_in": 4.7685,
      "cost_per_1m_out": 23.8425,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 100000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "minimax-m2.7",
      "name": "MiniMax M2.7",
      "cost_per_1m_in": 0.26622,
      "cost_per_1m_out": 1.06488,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 196608,
      "default_max_tokens": 19660,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "glm-5.1",
      "name": "GLM 5.1",
      "cost_per_1m_in": 1.24236,
      "cost_per_1m_out": 3.90336,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 202752,
      "default_max_tokens": 20275,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "qwen3.5-122b-a10b",
      "name": "Qwen3.5 122B A10B",
      "cost_per_1m_in": 0.4437,
      "cost_per_1m_out": 3.1059,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "qwen3.5-9b",
      "name": "Qwen3.5 9B",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262000,
      "default_max_tokens": 26200,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "nemotron-3-super-120b-a12b",
      "name": "Nemotron 3 Super 120B A12B",
      "cost_per_1m_in": 0.15606,
      "cost_per_1m_out": 0.67626,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262000,
      "default_max_tokens": 26200,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "qwen3-coder-next",
      "name": "Qwen3 Coder Next",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.8,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "glm-5",
      "name": "GLM 5",
      "cost_per_1m_in": 0.8874,
      "cost_per_1m_out": 2.83968,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 202752,
      "default_max_tokens": 20275,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "glm-4.6",
      "name": "GLM 4.6",
      "cost_per_1m_in": 0.35496,
      "cost_per_1m_out": 1.55295,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 203000,
      "default_max_tokens": 20300,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "deepseek-chat-v3.1",
      "name": "DeepSeek Chat V3.1",
      "cost_per_1m_in": 0.17748,
      "cost_per_1m_out": 0.70992,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 164000,
      "default_max_tokens": 16400,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "qwen-2.5-72b-instruct",
      "name": "Qwen2.5 72B Instruct",
      "cost_per_1m_in": 0.062118,
      "cost_per_1m_out": 0.230724,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 33000,
      "default_max_tokens": 3300,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "qwen3.5-397b-a17b",
      "name": "Qwen3.5 397B A17B",
      "cost_per_1m_in": 0.53244,
      "cost_per_1m_out": 3.19464,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262000,
      "default_max_tokens": 25000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "deepseek-v3.2",
      "name": "DeepSeek V3.2",
      "cost_per_1m_in": 0.26622,
      "cost_per_1m_out": 0.4437,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "mistral-small-2603",
      "name": "Mistral Small 4 2603",
      "cost_per_1m_in": 0.1275,
      "cost_per_1m_out": 0.51,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "minimax-m2.5",
      "name": "MiniMax M2.5",
      "cost_per_1m_in": 0.26622,
      "cost_per_1m_out": 0.97614,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 196608,
      "default_max_tokens": 19660,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "claude-4-6-sonnet",
      "name": "Claude Sonnet 4.6",
      "cost_per_1m_in": 2.8691,
      "cost_per_1m_out": 14.3095,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 100000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "glm-4.7-flash",
      "name": "GLM 4.7 Flash",
      "cost_per_1m_in": 0.0716,
      "cost_per_1m_out": 0.4293,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 203000,
      "default_max_tokens": 20300,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "kimi-k2.5",
      "name": "Kimi K2.5",
      "cost_per_1m_in": 0.4437,
      "cost_per_1m_out": 2.12976,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "claude-opus4-6",
      "name": "Claude Opus 4.6",
      "cost_per_1m_in": 4.7685,
      "cost_per_1m_out": 23.8425,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 100000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "minimax-m2",
      "name": "MiniMax M2",
      "cost_per_1m_in": 0.22185,
      "cost_per_1m_out": 0.8874,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 196608,
      "default_max_tokens": 19660,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "glm-4.7",
      "name": "GLM 4.7",
      "cost_per_1m_in": 0.53244,
      "cost_per_1m_out": 1.95228,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 202752,
      "default_max_tokens": 20275,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "minimax-m2.1",
      "name": "MiniMax M2.1",
      "cost_per_1m_in": 0.322,
      "cost_per_1m_out": 1.2879,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 196000,
      "default_max_tokens": 19600,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "qwen3-vl-235b-a22b",
      "name": "Qwen3 VL 235B A22B",
      "cost_per_1m_in": 0.186354,
      "cost_per_1m_out": 1.68606,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 13100,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "nvidia-nemotron-3-nano-30b-a3b",
      "name": "Nemotron 3 Nano 30B A3B",
      "cost_per_1m_in": 0.0537,
      "cost_per_1m_out": 0.215,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "claude-opus4-5",
      "name": "Claude Opus 4.5",
      "cost_per_1m_in": 4.7695,
      "cost_per_1m_out": 23.8485,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 200000,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "qwen3-next-80b-a3b-thinking",
      "name": "Qwen3 Next 80B A3B Thinking",
      "cost_per_1m_in": 0.13311,
      "cost_per_1m_out": 1.06488,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "holo2-30b-a3b",
      "name": "Holo2 30B A3B",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.7,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 22000,
      "default_max_tokens": 2200,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "devstral-2512",
      "name": "Devstral 2 2512",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262000,
      "default_max_tokens": 26200,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "nova-2-lite",
      "name": "Nova 2 Lite",
      "cost_per_1m_in": 0.335,
      "cost_per_1m_out": 2.822,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1000000,
      "default_max_tokens": 100000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "gpt-oss-safeguard-120b",
      "name": "GPT OSS Safeguard 120B",
      "cost_per_1m_in": 0.161,
      "cost_per_1m_out": 0.626,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "mistral-large-2512",
      "name": "Mistral Large 3 2512",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "ministral-8b-2512",
      "name": "Ministral 3 8b 2512",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.15,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "ministral-3b-2512",
      "name": "Ministral 3 3b 2512",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "ministral-14b-2512",
      "name": "Ministral 3 14b 2512",
      "cost_per_1m_in": 0.2,
      "cost_per_1m_out": 0.2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "intellect-3",
      "name": "INTELLECT-3",
      "cost_per_1m_in": 0.179,
      "cost_per_1m_out": 0.984,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "gpt-5.1",
      "name": "GPT 5.1",
      "cost_per_1m_in": 1.234,
      "cost_per_1m_out": 9.838,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 400000,
      "default_max_tokens": 40000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "nemotron-nano-v2-12b",
      "name": "Nemotron Nano V2 12b",
      "cost_per_1m_in": 0.215,
      "cost_per_1m_out": 0.635,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "claude-haiku-4-5",
      "name": "Claude Haiku 4.5",
      "cost_per_1m_in": 0.894,
      "cost_per_1m_out": 4.472,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 200000,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "claude-4-5-sonnet",
      "name": "Claude 4.5 Sonnet",
      "cost_per_1m_in": 2.683,
      "cost_per_1m_out": 13.416,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 200000,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "magistral-small-2509",
      "name": "Magistral Small 2509",
      "cost_per_1m_in": 0.5,
      "cost_per_1m_out": 1.5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "magistral-medium-2509",
      "name": "Magistral Medium 2509",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 5,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "hermes-4-70b",
      "name": "Hermes 4 70B",
      "cost_per_1m_in": 0.116,
      "cost_per_1m_out": 0.358,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "gpt-5",
      "name": "GPT 5",
      "cost_per_1m_in": 1.234,
      "cost_per_1m_out": 9.838,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 400000,
      "default_max_tokens": 40000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "gpt-oss-120b",
      "name": "GPT Oss 120b",
      "cost_per_1m_in": 0.035496,
      "cost_per_1m_out": 0.17748,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 13100,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "qwen3-30b-a3b-instruct-2507",
      "name": "Qwen3 30B A3B Instruct 2507",
      "cost_per_1m_in": 0.089,
      "cost_per_1m_out": 0.268,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262000,
      "default_max_tokens": 26200,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "gpt-oss-20b",
      "name": "GPT Oss 20b",
      "cost_per_1m_in": 0.026622,
      "cost_per_1m_out": 0.124236,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 13100,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "mistral-7b-instruct-v0.3",
      "name": "Mistral 7B Instruct v0.3",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.1,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 127000,
      "default_max_tokens": 12700,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "mistral-small-3.2-24b-instruct-2506",
      "name": "Mistral Small 3.2 24B Instruct 2506",
      "cost_per_1m_in": 0.09,
      "cost_per_1m_out": 0.28,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "mistral-large-2402",
      "name": "Mistral Large 2402",
      "cost_per_1m_in": 3.846,
      "cost_per_1m_out": 11.627,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 32000,
      "default_max_tokens": 3200,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "pixtral-large-2502",
      "name": "Pixtral Large 25.02",
      "cost_per_1m_in": 1.789,
      "cost_per_1m_out": 5.366,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "qwen3-235b-a22b-instruct-2507",
      "name": "Qwen3 235B A22B Instruct 2507",
      "cost_per_1m_in": 0.062118,
      "cost_per_1m_out": 0.408204,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 13100,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "qwen3-coder-30b-a3b-instruct",
      "name": "Qwen3 Coder 30b a3b Instruct",
      "cost_per_1m_in": 0.053244,
      "cost_per_1m_out": 0.22185,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 262000,
      "default_max_tokens": 26200,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "qwen3-32b",
      "name": "Qwen3 32B",
      "cost_per_1m_in": 0.089,
      "cost_per_1m_out": 0.268,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 40000,
      "default_max_tokens": 4000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "nova-lite-v1",
      "name": "Nova Lite 1.0",
      "cost_per_1m_in": 0.062,
      "cost_per_1m_out": 0.247,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 30000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "claude-sonnet-4",
      "name": "Claude Sonnet 4",
      "cost_per_1m_in": 2.601,
      "cost_per_1m_out": 13.01,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 200000,
      "default_max_tokens": 20000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "gpt-4.1-mini",
      "name": "GPT 4.1 mini",
      "cost_per_1m_in": 0.39,
      "cost_per_1m_out": 1.53,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "gpt-4.1-nano",
      "name": "GPT 4.1 nano",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.39,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "nova-micro-v1",
      "name": "Nova Micro 1.0",
      "cost_per_1m_in": 0.036,
      "cost_per_1m_out": 0.143,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "gpt-4.1",
      "name": "GPT 4.1",
      "cost_per_1m_in": 1.968,
      "cost_per_1m_out": 7.872,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1047576,
      "default_max_tokens": 104757,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "nova-pro-v1",
      "name": "Nova Pro 1.0",
      "cost_per_1m_in": 0.824,
      "cost_per_1m_out": 3.295,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 300000,
      "default_max_tokens": 30000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "llama-3.1-nemotron-ultra-253b-v1",
      "name": "Llama 3.1 Nemotron Ultra 253B v1",
      "cost_per_1m_in": 0.537,
      "cost_per_1m_out": 1.61,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "llama-4-maverick",
      "name": "Llama 4 Maverick",
      "cost_per_1m_in": 0.124236,
      "cost_per_1m_out": 0.602832,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1050000,
      "default_max_tokens": 105000,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "deepseek-v3-0324",
      "name": "DeepSeek V3 0324",
      "cost_per_1m_in": 0.26622,
      "cost_per_1m_out": 0.8874,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 163840,
      "default_max_tokens": 16384,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "mistral-small-2503",
      "name": "Mistral Small 2503",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "mistral-small-2506",
      "name": "Mistral Small 2506",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "gemini-2.0-flash-001",
      "name": "Gemini 2.0 Flash",
      "cost_per_1m_in": 0.13416,
      "cost_per_1m_out": 0.53664,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "gemini-2.0-flash-lite-001",
      "name": "Gemini 2.0 Flash Lite",
      "cost_per_1m_in": 0.06708,
      "cost_per_1m_out": 0.26832,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "gemini-2.5-flash",
      "name": "Gemini 2.5 Flash",
      "cost_per_1m_in": 0.26832,
      "cost_per_1m_out": 2.236,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "gemini-2.5-pro",
      "name": "Gemini 2.5 Pro",
      "cost_per_1m_in": 1.3416,
      "cost_per_1m_out": 8.944,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "gemma-3-27b-it",
      "name": "Gemma 3 27b it",
      "cost_per_1m_in": 0.089,
      "cost_per_1m_out": 0.268,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 13100,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "deepseek-r1-0528",
      "name": "DeepSeek R1 0528",
      "cost_per_1m_in": 0.585084,
      "cost_per_1m_out": 2.30724,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 164000,
      "default_max_tokens": 16400,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "codestral-2508",
      "name": "Codestral 25.08",
      "cost_per_1m_in": 0.3,
      "cost_per_1m_out": 0.9,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 256000,
      "default_max_tokens": 25600,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama-3.3-70b-instruct",
      "name": "Llama 3.3 70B Instruct",
      "cost_per_1m_in": 0.08874,
      "cost_per_1m_out": 0.274994,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131000,
      "default_max_tokens": 13100,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "gpt-4o",
      "name": "GPT 4o",
      "cost_per_1m_in": 2.38664,
      "cost_per_1m_out": 9.5466,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "gpt-5-mini",
      "name": "GPT 5 mini",
      "cost_per_1m_in": 0.25,
      "cost_per_1m_out": 1.968,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 400000,
      "default_max_tokens": 40000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "gpt-5-nano",
      "name": "GPT 5 nano",
      "cost_per_1m_in": 0.054,
      "cost_per_1m_out": 0.394,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 400000,
      "default_max_tokens": 40000,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "mistral-large-2411",
      "name": "Mistral Large 2411",
      "cost_per_1m_in": 1.8,
      "cost_per_1m_out": 5.4,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "hermes-4-405b",
      "name": "Hermes 4 405B",
      "cost_per_1m_in": 0.894,
      "cost_per_1m_out": 2.683,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "mistral-medium-2508",
      "name": "Mistral Medium 2508",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "devstral-medium-2507",
      "name": "Devstral Medium 2507",
      "cost_per_1m_in": 0.4,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "mistral-nemo-instruct-2407",
      "name": "Mistral Nemo 2407",
      "cost_per_1m_in": 0.13,
      "cost_per_1m_out": 0.13,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "devstral-small-2507",
      "name": "Devstral Small 2507",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.3,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "llama-3.1-405b-instruct",
      "name": "Llama 3.1 405B Instruct",
      "cost_per_1m_in": 1.75,
      "cost_per_1m_out": 1.75,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "gpt-4o-mini",
      "name": "GPT 4o mini",
      "cost_per_1m_in": 0.1432,
      "cost_per_1m_out": 0.5728,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": true
    },
    {
      "id": "llama-3.1-8b-instruct",
      "name": "Llama 3.1 8B Instruct",
      "cost_per_1m_in": 0.018,
      "cost_per_1m_out": 0.054,
      "cost_per_1m_in_cached": 0,
      "cost_per_1m_out_cached": 0,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    }
  ]
}
