models.json

  1{
  2	"providers": {
  3		"plexus": {
  4			"baseUrl": "http://100.77.116.78:4000/v1",
  5			"apiKey": "!fnox get PLEXUS_API_KEY",
  6			"api": "openai-completions",
  7			"models": [
  8				{
  9					"id": "glm-4.7",
  10					"name": "GLM-4.7",
 11					"reasoning": true,
 12					"input": ["text"],
 13					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 14					"contextWindow": 200000,
 15					"maxTokens": 64000,
 16					"compat": {
 17						"supportsReasoningEffort": true,
 18						"supportsDeveloperRole": false
 19					}
 20				},
 21				{
 22					"id": "glm-4.7-flash",
 23					"name": "GLM-4.7-Flash",
 24					"reasoning": true,
 25					"input": ["text"],
 26					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 27					"contextWindow": 200000,
 28					"maxTokens": 131072,
 29					"compat": {
 30						"supportsReasoningEffort": true,
 31						"supportsDeveloperRole": false
 32					}
 33				},
 34				{
 35					"id": "glm-5",
 36					"name": "GLM-5",
 37					"reasoning": true,
 38					"input": ["text"],
 39					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 40					"contextWindow": 204800,
 41					"maxTokens": 131072,
 42					"compat": {
 43						"supportsReasoningEffort": true,
 44						"supportsDeveloperRole": false
 45					}
 46				},
 47				{
 48					"id": "deepseek-v3.2",
 49					"name": "DeepSeek V3.2",
 50					"reasoning": true,
 51					"input": ["text"],
 52					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 53					"contextWindow": 162816,
 54					"maxTokens": 8000,
 55					"compat": {
 56						"supportsReasoningEffort": true,
 57						"supportsDeveloperRole": false
 58					}
 59				},
 60				{
 61					"id": "gpt-oss-120b",
 62					"name": "GPT OSS 120B",
 63					"reasoning": true,
 64					"input": ["text"],
 65					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 66					"contextWindow": 128000,
 67					"maxTokens": 32768,
 68					"compat": {
 69						"supportsReasoningEffort": true,
 70						"supportsDeveloperRole": false
 71					}
 72				},
 73				{
 74					"id": "kimi-k2.5",
 75					"name": "Kimi K2.5",
 76					"reasoning": true,
 77					"input": ["text", "image"],
 78					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 79					"contextWindow": 262144,
 80					"maxTokens": 65536,
 81					"compat": {
 82						"supportsReasoningEffort": true,
 83						"supportsDeveloperRole": false
 84					}
 85				},
 86				{
 87					"id": "kimi-k2.5h",
 88					"name": "Kimi K2.5h",
 89					"reasoning": true,
 90					"input": ["text", "image"],
 91					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 92					"contextWindow": 262144,
 93					"maxTokens": 262144,
 94					"compat": {
 95						"supportsReasoningEffort": true,
 96						"supportsDeveloperRole": false
 97					}
 98				},
 99				{
100					"id": "gpt-5.3-codex",
101					"name": "GPT-5.3 Codex",
102					"reasoning": true,
103					"input": ["text", "image"],
104					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
105					"contextWindow": 400000,
106					"maxTokens": 128000,
107					"compat": {
108						"supportsReasoningEffort": true,
109						"supportsDeveloperRole": false
110					}
111				},
112				{
113					"id": "minimax-m2.5",
114					"name": "MiniMax-M2.5",
115					"reasoning": true,
116					"input": ["text"],
117					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
118					"contextWindow": 204800,
119					"maxTokens": 131072,
120					"compat": {
121						"supportsReasoningEffort": true,
122						"supportsDeveloperRole": false
123					}
124				},
125				{
126					"id": "minimax-m2.5h",
127					"name": "MiniMax-M2.5h",
128					"reasoning": true,
129					"input": ["text"],
130					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
131					"contextWindow": 204800,
132					"maxTokens": 131072,
133					"compat": {
134						"supportsReasoningEffort": true,
135						"supportsDeveloperRole": false
136					}
137				},
138				{
139					"id": "claude-sonnet-4.6",
140					"name": "Claude Sonnet 4.6",
141					"reasoning": true,
142					"input": ["text", "image"],
143					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
144					"contextWindow": 1000000,
145					"maxTokens": 64000,
146					"compat": {
147						"supportsReasoningEffort": true,
148						"supportsDeveloperRole": false
149					}
150				},
151				{
152					"id": "gpt-5.4",
153					"name": "GPT-5.4",
154					"reasoning": true,
155					"input": ["text", "image"],
156					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
157					"contextWindow": 1050000,
158					"maxTokens": 128000,
159					"compat": {
160						"supportsReasoningEffort": true,
161						"supportsDeveloperRole": false
162					}
163				},
164				{
165					"id": "gemini-3.1-flash-lite",
166					"name": "Gemini 3.1 Flash Lite Preview",
167					"reasoning": true,
168					"input": ["text", "image"],
169					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
170					"contextWindow": 1048576,
171					"maxTokens": 65536,
172					"compat": {
173						"supportsReasoningEffort": true,
174						"supportsDeveloperRole": false
175					}
176				},
177				{
178					"id": "gemini-3.1-pro-preview",
179					"name": "Gemini 3.1 Pro Preview",
180					"reasoning": true,
181					"input": ["text", "image"],
182					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
183					"contextWindow": 1048576,
184					"maxTokens": 65536,
185					"compat": {
186						"supportsReasoningEffort": true,
187						"supportsDeveloperRole": false
188					}
189				},
190				{
191					"id": "claude-opus-4.6",
192					"name": "Claude Opus 4.6",
193					"reasoning": true,
194					"input": ["text", "image"],
195					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
196					"contextWindow": 1000000,
197					"maxTokens": 128000,
198					"compat": {
199						"supportsReasoningEffort": true,
200						"supportsDeveloperRole": false
201					}
202				},
203				{
204					"id": "nemotron-3-super-120b-a12b",
205					"name": "Nemotron 3 Super",
206					"reasoning": true,
207					"input": ["text"],
208					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
209					"contextWindow": 262144,
210					"maxTokens": 65536,
211					"compat": {
212						"supportsReasoningEffort": true,
213						"supportsDeveloperRole": false
214					}
215				}
216			]
217		}
218	}
219}