fix(io.net): adjust reasoning and reasoning levels (#255)

Andrey Nering created

* Gemma 4 is a thinking model
* Only the gpt-oss family of models supports reasoning levels

Change summary

cmd/ionet/main.go                     | 20 +++++++--
internal/providers/configs/ionet.json | 56 ----------------------------
2 files changed, 16 insertions(+), 60 deletions(-)

Detailed changes

cmd/ionet/main.go 🔗

@@ -71,10 +71,11 @@ func main() {
 			continue
 		}
 
-		canReason := isReasoningModel(model.ID)
-		var reasoningLevels []string
-		var defaultReasoning string
-		if canReason {
+		var (
+			reasoningLevels  []string
+			defaultReasoning string
+		)
+		if supportsReasoningLevels(model.ID) {
 			reasoningLevels = []string{"low", "medium", "high"}
 			defaultReasoning = "medium"
 		}
@@ -95,7 +96,7 @@ func main() {
 			CostPer1MOutCached:     costPer1MOutCached,
 			ContextWindow:          int64(model.ContextWindow),
 			DefaultMaxTokens:       int64(model.ContextWindow) / 10,
-			CanReason:              canReason,
+			CanReason:              isReasoningModel(model.ID),
 			ReasoningLevels:        reasoningLevels,
 			DefaultReasoningEffort: defaultReasoning,
 			SupportsImages:         model.SupportsImagesInput,
@@ -166,6 +167,15 @@ func isReasoningModel(modelID string) bool {
 		"glm",
 		"gpt-oss",
 		"llama",
+		"gemma-4",
+	)
+}
+
 // supportsReasoningLevels reports whether the model supports reasoning levels.
+func supportsReasoningLevels(modelID string) bool {
+	return xstrings.ContainsAnyOf(
+		strings.ToLower(modelID),
+		"gpt-oss",
 	)
 }
 

internal/providers/configs/ionet.json 🔗

@@ -16,7 +16,7 @@
       "cost_per_1m_out_cached": 0,
       "context_window": 262144,
       "default_max_tokens": 26214,
-      "can_reason": false,
+      "can_reason": true,
       "supports_attachments": false
     },
     {
@@ -41,12 +41,6 @@
       "context_window": 128000,
       "default_max_tokens": 12800,
       "can_reason": true,
-      "reasoning_levels": [
-        "low",
-        "medium",
-        "high"
-      ],
-      "default_reasoning_effort": "medium",
       "supports_attachments": false
     },
     {
@@ -95,12 +89,6 @@
       "context_window": 262144,
       "default_max_tokens": 26214,
       "can_reason": true,
-      "reasoning_levels": [
-        "low",
-        "medium",
-        "high"
-      ],
-      "default_reasoning_effort": "medium",
       "supports_attachments": false
     },
     {
@@ -113,12 +101,6 @@
       "context_window": 262144,
       "default_max_tokens": 26214,
       "can_reason": true,
-      "reasoning_levels": [
-        "low",
-        "medium",
-        "high"
-      ],
-      "default_reasoning_effort": "medium",
       "supports_attachments": true
     },
     {
@@ -131,12 +113,6 @@
       "context_window": 262144,
       "default_max_tokens": 26214,
       "can_reason": true,
-      "reasoning_levels": [
-        "low",
-        "medium",
-        "high"
-      ],
-      "default_reasoning_effort": "medium",
       "supports_attachments": true
     },
     {
@@ -197,12 +173,6 @@
       "context_window": 200000,
       "default_max_tokens": 20000,
       "can_reason": true,
-      "reasoning_levels": [
-        "low",
-        "medium",
-        "high"
-      ],
-      "default_reasoning_effort": "medium",
       "supports_attachments": false
     },
     {
@@ -215,12 +185,6 @@
       "context_window": 202752,
       "default_max_tokens": 20275,
       "can_reason": true,
-      "reasoning_levels": [
-        "low",
-        "medium",
-        "high"
-      ],
-      "default_reasoning_effort": "medium",
       "supports_attachments": false
     },
     {
@@ -233,12 +197,6 @@
       "context_window": 200000,
       "default_max_tokens": 20000,
       "can_reason": true,
-      "reasoning_levels": [
-        "low",
-        "medium",
-        "high"
-      ],
-      "default_reasoning_effort": "medium",
       "supports_attachments": false
     },
     {
@@ -251,12 +209,6 @@
       "context_window": 202752,
       "default_max_tokens": 20275,
       "can_reason": true,
-      "reasoning_levels": [
-        "low",
-        "medium",
-        "high"
-      ],
-      "default_reasoning_effort": "medium",
       "supports_attachments": false
     },
     {
@@ -269,12 +221,6 @@
       "context_window": 202752,
       "default_max_tokens": 20275,
       "can_reason": true,
-      "reasoning_levels": [
-        "low",
-        "medium",
-        "high"
-      ],
-      "default_reasoning_effort": "medium",
       "supports_attachments": false
     }
   ]