@@ -50,6 +50,8 @@ type Limits struct {
type Supports struct {
ToolCalls bool `json:"tool_calls,omitempty"`
ParallelToolCalls bool `json:"parallel_tool_calls,omitempty"`
+ MaxThinkingBudget int `json:"max_thinking_budget,omitempty"`
+ MinThinkingBudget int `json:"min_thinking_budget,omitempty"`
}
type Policy struct {
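For context, here is a minimal, self-contained sketch of how the two new budget fields would surface when decoding a provider capabilities payload. Only the struct and its json tags come from the diff above; the payload shape, field values, and the assumption that the budgets sit under a "supports" key are illustrative, not taken from the actual Copilot API.

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// Supports mirrors the struct from the diff above; the sample payload below is an assumption.
type Supports struct {
	ToolCalls         bool `json:"tool_calls,omitempty"`
	ParallelToolCalls bool `json:"parallel_tool_calls,omitempty"`
	MaxThinkingBudget int  `json:"max_thinking_budget,omitempty"`
	MinThinkingBudget int  `json:"min_thinking_budget,omitempty"`
}

func main() {
	// Hypothetical fragment of a model entry; only the tag names are grounded in the struct.
	raw := []byte(`{"supports": {"max_thinking_budget": 32000, "min_thinking_budget": 1024}}`)

	var caps struct {
		Supports Supports `json:"supports"`
	}
	if err := json.Unmarshal(raw, &caps); err != nil {
		log.Fatal(err)
	}
	fmt.Println(caps.Supports.MaxThinkingBudget) // prints 32000
}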
@@ -144,12 +146,34 @@ func modelsToCatwalk(m []Model) []catwalk.Model {
}
func modelToCatwalk(m Model) catwalk.Model {
+ canReason, reasoningLevels, defaultReasoning := detectReasoningCapabilities(m)
+
return catwalk.Model{
- ID: m.ID,
- Name: m.Name,
- DefaultMaxTokens: int64(m.Capabilities.Limits.MaxOutputTokens),
- ContextWindow: int64(m.Capabilities.Limits.MaxContextWindowTokens),
+ ID: m.ID,
+ Name: m.Name,
+ DefaultMaxTokens: int64(m.Capabilities.Limits.MaxOutputTokens),
+ ContextWindow: int64(m.Capabilities.Limits.MaxContextWindowTokens),
+ CanReason: canReason,
+ ReasoningLevels: reasoningLevels,
+ DefaultReasoningEffort: defaultReasoning,
+ }
+}
+
+func detectReasoningCapabilities(m Model) (canReason bool, levels []string, defaultLevel string) {
+	// Models that advertise a thinking budget (Claude, Gemini) support extended thinking but expose no effort levels
+ if m.Capabilities.Supports.MaxThinkingBudget > 0 {
+ return true, nil, ""
}
+
+ // OpenAI o-series and GPT-5+ models support reasoning with effort levels
+ if strings.HasPrefix(m.ID, "o1") ||
+ strings.HasPrefix(m.ID, "o3") ||
+ strings.HasPrefix(m.ID, "o4") ||
+ strings.HasPrefix(m.ID, "gpt-5") {
+ return true, []string{"low", "medium", "high"}, "medium"
+ }
+
+ return false, nil, ""
}
func copilotToken() string {
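To make the detection rules concrete, here is a minimal test sketch exercising detectReasoningCapabilities. It assumes it lives in the same package as the code above and that Capabilities and Supports are plain (non-pointer) struct fields; the model IDs are just example values, not a claim about the Copilot catalog.

import "testing"

func TestDetectReasoningCapabilities(t *testing.T) {
	// A model advertising a thinking budget: reasons, but exposes no effort levels.
	budgeted := Model{ID: "claude-sonnet-4"}
	budgeted.Capabilities.Supports.MaxThinkingBudget = 32000
	if can, levels, def := detectReasoningCapabilities(budgeted); !can || levels != nil || def != "" {
		t.Errorf("thinking-budget model: got (%v, %v, %q)", can, levels, def)
	}

	// An OpenAI-style ID prefix: reasons with low/medium/high effort, defaulting to medium.
	if can, levels, def := detectReasoningCapabilities(Model{ID: "o4-mini"}); !can || len(levels) != 3 || def != "medium" {
		t.Errorf("o4-mini: got (%v, %v, %q)", can, levels, def)
	}

	// Anything else keeps reasoning disabled.
	if can, _, _ := detectReasoningCapabilities(Model{ID: "gpt-4.1"}); can {
		t.Error("gpt-4.1: expected canReason to be false")
	}
}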
@@ -15,7 +15,7 @@
"cost_per_1m_out_cached": 0,
"context_window": 144000,
"default_max_tokens": 16000,
- "can_reason": false,
+ "can_reason": true,
"supports_attachments": false,
"options": {}
},
@@ -28,7 +28,7 @@
"cost_per_1m_out_cached": 0,
"context_window": 144000,
"default_max_tokens": 16000,
- "can_reason": false,
+ "can_reason": true,
"supports_attachments": false,
"options": {}
},
@@ -41,7 +41,7 @@
"cost_per_1m_out_cached": 0,
"context_window": 216000,
"default_max_tokens": 16000,
- "can_reason": false,
+ "can_reason": true,
"supports_attachments": false,
"options": {}
},
@@ -54,7 +54,7 @@
"cost_per_1m_out_cached": 0,
"context_window": 144000,
"default_max_tokens": 16000,
- "can_reason": false,
+ "can_reason": true,
"supports_attachments": false,
"options": {}
},
@@ -67,7 +67,7 @@
"cost_per_1m_out_cached": 0,
"context_window": 128000,
"default_max_tokens": 64000,
- "can_reason": false,
+ "can_reason": true,
"supports_attachments": false,
"options": {}
},
@@ -106,7 +106,13 @@
"cost_per_1m_out_cached": 0,
"context_window": 400000,
"default_max_tokens": 128000,
- "can_reason": false,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
"supports_attachments": false,
"options": {}
},
@@ -119,7 +125,13 @@
"cost_per_1m_out_cached": 0,
"context_window": 264000,
"default_max_tokens": 64000,
- "can_reason": false,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
"supports_attachments": false,
"options": {}
},
@@ -132,7 +144,13 @@
"cost_per_1m_out_cached": 0,
"context_window": 264000,
"default_max_tokens": 64000,
- "can_reason": false,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
"supports_attachments": false,
"options": {}
},
@@ -145,7 +163,13 @@
"cost_per_1m_out_cached": 0,
"context_window": 400000,
"default_max_tokens": 128000,
- "can_reason": false,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
"supports_attachments": false,
"options": {}
},
@@ -158,7 +182,13 @@
"cost_per_1m_out_cached": 0,
"context_window": 400000,
"default_max_tokens": 128000,
- "can_reason": false,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
"supports_attachments": false,
"options": {}
},
@@ -171,7 +201,13 @@
"cost_per_1m_out_cached": 0,
"context_window": 264000,
"default_max_tokens": 64000,
- "can_reason": false,
+ "can_reason": true,
+ "reasoning_levels": [
+ "low",
+ "medium",
+ "high"
+ ],
+ "default_reasoning_effort": "medium",
"supports_attachments": false,
"options": {}
},