models.json

  1{
  2	"providers": {
  3		"plexus": {
  4			"baseUrl": "http://100.108.156.33:4000/v1",
  5			"apiKey": "!fnox get PLEXUS_API_KEY",
  6			"api": "openai-completions",
  7			"models": [
  8				{
  9					"id": "claude-opus-4.5",
 10					"name": "Claude Opus 4.5",
 11					"reasoning": true,
 12					"supportsReasoningEffort": true,
 13					"supportsDeveloperRole": false,
 14					"input": ["text", "image"],
 15					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 16					"contextWindow": 200000,
 17					"maxTokens": 50000
 18				},
 19				{
 20					"id": "kimi-k2.5h",
 21					"name": "Kimi K2.5h",
 22					"reasoning": true,
 23					"supportsReasoningEffort": true,
 24					"supportsDeveloperRole": false,
 25					"input": ["text", "image"],
 26					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 27					"contextWindow": 256000,
 28					"maxTokens": 64000
 29				},
 30				{
 31					"id": "minimax-m2.5",
 32					"name": "MiniMax M2.5",
 33					"reasoning": true,
 34					"supportsReasoningEffort": true,
 35					"supportsDeveloperRole": false,
 36					"input": ["text"],
 37					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 38					"contextWindow": 205000,
 39					"maxTokens": 51250
 40				},
 41				{
 42					"id": "glm-5",
 43					"name": "GLM 5",
 44					"reasoning": true,
 45					"supportsReasoningEffort": true,
 46					"supportsDeveloperRole": false,
 47					"input": ["text"],
 48					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 49					"contextWindow": 202752,
 50					"maxTokens": 50688
 51				},
 52				{
 53					"id": "qwen3.5-397b-a17b",
  54					"name": "Qwen3.5 397B A17B",
 55					"reasoning": true,
 56					"supportsReasoningEffort": true,
 57					"supportsDeveloperRole": false,
 58					"input": ["text"],
 59					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 60					"contextWindow": 202752,
 61					"maxTokens": 50688
 62				},
 63				{
 64					"id": "glm-4.7",
 65					"name": "GLM 4.7",
 66					"reasoning": true,
 67					"supportsReasoningEffort": true,
 68					"supportsDeveloperRole": false,
 69					"input": ["text"],
 70					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 71					"contextWindow": 202752,
 72					"maxTokens": 50688
 73				},
 74				{
 75					"id": "deepseek-v3.2",
 76					"name": "DeepSeek V3.2",
 77					"reasoning": true,
 78					"supportsReasoningEffort": true,
 79					"supportsDeveloperRole": false,
 80					"input": ["text"],
 81					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 82					"contextWindow": 162816,
 83					"maxTokens": 40704
 84				},
 85				{
 86					"id": "gpt-oss-120b",
 87					"name": "GPT OSS 120B",
 88					"reasoning": true,
 89					"supportsReasoningEffort": true,
 90					"supportsDeveloperRole": false,
  91					"input": ["text"],
 92					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
 93					"contextWindow": 131072,
 94					"maxTokens": 32768
 95				},
 96				{
 97					"id": "diff-apply",
 98					"name": "Diff Apply",
 99					"reasoning": false,
100					"supportsDeveloperRole": false,
101					"input": ["text"],
102					"cost": { "input": 0, "output": 0, "cacheRead": 0, "cacheWrite": 0 },
103					"contextWindow": 256000,
104					"maxTokens": 64000
105				}
106			]
107		}
108	}
109}