cmd/ionet/main.go 🔗
@@ -175,6 +175,5 @@ func supportsTools(modelID string) bool {
"llama-4",
"mistral-nemo",
"qwen2.5",
- "gpt-oss",
)
}
Andrey Nering created
cmd/ionet/main.go | 1 -
internal/providers/configs/ionet.json | 38 +++++++++++++++++++++++++++++
2 files changed, 38 insertions(+), 1 deletion(-)
@@ -175,6 +175,5 @@ func supportsTools(modelID string) bool {
"llama-4",
"mistral-nemo",
"qwen2.5",
- "gpt-oss",
)
}
@@ -97,6 +97,44 @@
"supports_attachments": false,
"options": {}
},
+ {
+ "id": "openai/gpt-oss-120b",
+ "name": "OpenAI: gpt-oss-120b",
+ "cost_per_1m_in": 0.02,
+ "cost_per_1m_out": 0.1,
+ "cost_per_1m_in_cached": 0.01,
+ "cost_per_1m_out_cached": 0.04,
+ "context_window": 131072,
+ "default_max_tokens": 13107,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
+ {
+ "id": "openai/gpt-oss-20b",
+ "name": "OpenAI: gpt-oss-20b",
+ "cost_per_1m_in": 0.016,
+ "cost_per_1m_out": 0.06,
+ "cost_per_1m_in_cached": 0.008,
+ "cost_per_1m_out_cached": 0.032,
+ "context_window": 64000,
+ "default_max_tokens": 6400,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
+ "supports_attachments": false,
+ "options": {}
+ },
{
"id": "Qwen/Qwen3-Next-80B-A3B-Instruct",
"name": "Qwen: Qwen3 Next 80B A3B Instruct",