{
  "name": "Charm Hyper",
  "id": "hyper",
  "api_endpoint": "https://hyper.charm.land/api/v1/fantasy",
  "type": "hyper",
  "default_large_model_id": "kimi-k2.5",
  "default_small_model_id": "gpt-oss-120b",
  "models": [
    {
      "id": "deepseek-v4-flash",
      "name": "DeepSeek V4 Flash",
      "cost_per_1m_in": 1.55,
      "cost_per_1m_out": 2.28,
      "cost_per_1m_in_cached": 0.38,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": true,
      "supports_attachments": false
    },
    {
      "id": "deepseek-v4-pro",
      "name": "DeepSeek V4 Pro",
      "cost_per_1m_in": 4.45,
      "cost_per_1m_out": 5.5,
      "cost_per_1m_in_cached": 0.35,
      "cost_per_1m_out_cached": 0,
      "context_window": 1048576,
      "default_max_tokens": 104857,
      "can_reason": true,
      "supports_attachments": false
    },
    {
      "id": "gemma-4-26b-a4b-it",
      "name": "Gemma 4 26B A4B",
      "cost_per_1m_in": 0.145,
      "cost_per_1m_out": 0.5,
      "cost_per_1m_in_cached": 0.08,
      "cost_per_1m_out_cached": 0,
      "context_window": 32768,
      "default_max_tokens": 3276,
      "can_reason": true,
      "supports_attachments": false
    },
    {
      "id": "glm-5",
      "name": "GLM-5",
      "cost_per_1m_in": 1,
      "cost_per_1m_out": 3,
      "cost_per_1m_in_cached": 0.5,
      "cost_per_1m_out_cached": 0,
      "context_window": 202752,
      "default_max_tokens": 20275,
      "can_reason": true,
      "supports_attachments": false
    },
    {
      "id": "glm-5.1",
      "name": "GLM-5.1",
      "cost_per_1m_in": 1.5,
      "cost_per_1m_out": 4.4,
      "cost_per_1m_in_cached": 0.26,
      "cost_per_1m_out_cached": 0,
      "context_window": 202752,
      "default_max_tokens": 20275,
      "can_reason": true,
      "supports_attachments": false
    },
    {
      "id": "gpt-oss-120b",
      "name": "gpt-oss-120b",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.4,
      "cost_per_1m_in_cached": 0.01,
      "cost_per_1m_out_cached": 0.04,
      "context_window": 131072,
      "default_max_tokens": 13107,
      "can_reason": true,
      "reasoning_levels": [
        "low",
        "medium",
        "high"
      ],
      "default_reasoning_effort": "medium",
      "supports_attachments": false
    },
    {
      "id": "kimi-k2.5",
      "name": "Kimi K2.5",
      "cost_per_1m_in": 0.445,
      "cost_per_1m_out": 2,
      "cost_per_1m_in_cached": 0.225,
      "cost_per_1m_out_cached": 1.1,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": true,
      "supports_attachments": true
    },
    {
      "id": "kimi-k2.6",
      "name": "Kimi K2.6",
      "cost_per_1m_in": 0.8,
      "cost_per_1m_out": 4,
      "cost_per_1m_in_cached": 0.25,
      "cost_per_1m_out_cached": 0,
      "context_window": 262142,
      "default_max_tokens": 26214,
      "can_reason": true,
      "supports_attachments": true
    },
    {
      "id": "llama-3.3-70b-instruct",
      "name": "Llama 3.3 70B Instruct",
      "cost_per_1m_in": 0.1,
      "cost_per_1m_out": 0.32,
      "cost_per_1m_in_cached": 0.05,
      "cost_per_1m_out_cached": 0.2,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": true,
      "supports_attachments": false
    },
    {
      "id": "llama-4-maverick-17b-128e-instruct-fp8",
      "name": "Llama 4 Maverick 17B 128E Instruct FP8",
      "cost_per_1m_in": 0.15,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0.075,
      "cost_per_1m_out_cached": 0.3,
      "context_window": 430000,
      "default_max_tokens": 43000,
      "can_reason": true,
      "supports_attachments": true
    },
    {
      "id": "mistral-large-instruct-2411",
      "name": "Mistral Large Instruct 2411",
      "cost_per_1m_in": 2,
      "cost_per_1m_out": 6,
      "cost_per_1m_in_cached": 1,
      "cost_per_1m_out_cached": 4,
      "context_window": 128000,
      "default_max_tokens": 12800,
      "can_reason": false,
      "supports_attachments": true
    },
    {
      "id": "qwen3-coder-480b-a35b-instruct-int4-mixed-ar",
      "name": "Qwen3 Coder 480B A35B Instruct INT4 Mixed AR",
      "cost_per_1m_in": 0.22,
      "cost_per_1m_out": 0.95,
      "cost_per_1m_in_cached": 0.11,
      "cost_per_1m_out_cached": 0.44,
      "context_window": 106000,
      "default_max_tokens": 10600,
      "can_reason": false,
      "supports_attachments": false
    },
    {
      "id": "qwen3-next-80b-a3b-instruct",
      "name": "Qwen3 Next 80B A3B Instruct",
      "cost_per_1m_in": 0.06,
      "cost_per_1m_out": 0.6,
      "cost_per_1m_in_cached": 0.03,
      "cost_per_1m_out_cached": 0.12,
      "context_window": 262144,
      "default_max_tokens": 26214,
      "can_reason": false,
      "supports_attachments": false
    }
  ]
}