feat: add large/small model defaults

Author: Kujtim Hoxha

Change summary

internal/providers/configs/anthropic.json  |  3 ++-
internal/providers/configs/azure.json      |  3 ++-
internal/providers/configs/bedrock.json    |  3 ++-
internal/providers/configs/gemini.json     |  3 ++-
internal/providers/configs/openai.json     |  3 ++-
internal/providers/configs/openrouter.json |  3 ++-
internal/providers/configs/vertexai.json   |  3 ++-
internal/providers/configs/xai.json        |  3 ++-
internal/providers/providers.go            |  2 +-
pkg/provider/provider.go                   | 15 ++++++++-------
10 files changed, 25 insertions(+), 16 deletions(-)

Detailed changes

internal/providers/configs/anthropic.json

@@ -4,7 +4,8 @@
   "type": "anthropic",
   "api_key": "$ANTHROPIC_API_KEY",
   "api_endpoint": "$ANTHROPIC_API_ENDPOINT",
-  "default_model_id": "claude-sonnet-4-20250514",
+  "default_large_model_id": "claude-sonnet-4-20250514",
+  "default_small_model_id": "claude-3-5-haiku-20241022",
   "models": [
     {
       "id": "claude-opus-4-20250514",

internal/providers/configs/azure.json

@@ -4,7 +4,8 @@
   "type": "azure",
   "api_key": "$AZURE_OPENAI_API_KEY",
   "api_endpoint": "$AZURE_OPENAI_API_ENDPOINT",
-  "default_model_id": "o4-mini",
+  "default_large_model_id": "o4-mini",
+  "default_small_model_id": "gpt-4o",
   "models": [
     {
       "id": "codex-mini-latest",

internal/providers/configs/bedrock.json

@@ -4,7 +4,8 @@
   "type": "bedrock",
   "api_key": "",
   "api_endpoint": "",
-  "default_model_id": "claude-sonnet-4-20250514",
+  "default_large_model_id": "anthropic.claude-sonnet-4-20250514-v1:0",
+  "default_small_model_id": "anthropic.claude-3-5-haiku-20241022-v1:0",
   "models": [
     {
       "id": "anthropic.claude-opus-4-20250514-v1:0",

internal/providers/configs/gemini.json

@@ -4,7 +4,8 @@
   "type": "gemini",
   "api_key": "$GEMINI_API_KEY",
   "api_endpoint": "$GEMINI_API_ENDPOINT",
-  "default_model_id": "gemini-2.5-pro",
+  "default_large_model_id": "gemini-2.5-pro",
+  "default_small_model_id": "gemini-2.5-flash",
   "models": [
     {
       "id": "gemini-2.5-pro",

internal/providers/configs/openai.json

@@ -4,7 +4,8 @@
   "type": "openai",
   "api_key": "$OPENAI_API_KEY",
   "api_endpoint": "$OPENAI_API_ENDPOINT",
-  "default_model_id": "o4-mini",
+  "default_large_model_id": "o4-mini",
+  "default_small_model_id": "gpt-4o",
   "models": [
     {
       "id": "codex-mini-latest",

internal/providers/configs/openrouter.json

@@ -4,7 +4,8 @@
   "api_key": "$OPENROUTER_API_KEY",
   "api_endpoint": "https://openrouter.ai/api/v1",
   "type": "openai",
-  "default_model_id": "anthropic/claude-sonnet-4",
+  "default_large_model_id": "anthropic/claude-sonnet-4",
+  "default_small_model_id": "anthropic/claude-3-5-haiku",
   "models": [
     {
       "id": "mistralai/mistral-small-3.2-24b-instruct:free",

internal/providers/configs/vertexai.json

@@ -4,7 +4,8 @@
   "type": "vertex",
   "api_key": "",
   "api_endpoint": "",
-  "default_model_id": "gemini-2.5-pro",
+  "default_large_model_id": "gemini-2.5-pro",
+  "default_small_model_id": "gemini-2.5-flash",
   "models": [
     {
       "id": "gemini-2.5-pro",

internal/providers/configs/xai.json

@@ -4,7 +4,8 @@
   "api_key": "$XAI_API_KEY",
   "api_endpoint": "https://api.x.ai/v1",
   "type": "openai",
-  "default_model_id": "grok-3",
+  "default_large_model_id": "grok-3",
+  "default_small_model_id": "grok-3-mini",
   "models": [
     {
       "id": "grok-3-mini",

internal/providers/providers.go

@@ -37,8 +37,8 @@ var bedrockConfig []byte
 type ProviderFunc func() provider.Provider
 
 var providerRegistry = []ProviderFunc{
-	openAIProvider,
 	anthropicProvider,
+	openAIProvider,
 	geminiProvider,
 	azureProvider,
 	bedrockProvider,

pkg/provider/provider.go

@@ -33,13 +33,14 @@ const (
 
 // Provider represents an AI provider configuration.
 type Provider struct {
-	Name           string            `json:"name"`
-	ID             InferenceProvider `json:"id"`
-	APIKey         string            `json:"api_key,omitempty"`
-	APIEndpoint    string            `json:"api_endpoint,omitempty"`
-	Type           Type              `json:"type,omitempty"`
-	DefaultModelID string            `json:"default_model_id,omitempty"`
-	Models         []Model           `json:"models,omitempty"`
+	Name                string            `json:"name"`
+	ID                  InferenceProvider `json:"id"`
+	APIKey              string            `json:"api_key,omitempty"`
+	APIEndpoint         string            `json:"api_endpoint,omitempty"`
+	Type                Type              `json:"type,omitempty"`
+	DefaultLargeModelID string            `json:"default_large_model_id,omitempty"`
+	DefaultSmallModelID string            `json:"default_small_model_id,omitempty"`
+	Models              []Model           `json:"models,omitempty"`
 }
 
 // Model represents an AI model configuration.
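Because the struct's new JSON tags match the keys introduced in the config files above (default_large_model_id, default_small_model_id), the updated configs should unmarshal into Provider without any extra plumbing. A minimal sketch follows, with simplified stand-ins for InferenceProvider, Type, and Model, and a trimmed, hypothetical config snippet shaped like anthropic.json.

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-ins for the types referenced by Provider.
type InferenceProvider string
type Type string

type Model struct {
	ID string `json:"id"`
}

// Provider mirrors the updated struct in pkg/provider/provider.go.
type Provider struct {
	Name                string            `json:"name"`
	ID                  InferenceProvider `json:"id"`
	APIKey              string            `json:"api_key,omitempty"`
	APIEndpoint         string            `json:"api_endpoint,omitempty"`
	Type                Type              `json:"type,omitempty"`
	DefaultLargeModelID string            `json:"default_large_model_id,omitempty"`
	DefaultSmallModelID string            `json:"default_small_model_id,omitempty"`
	Models              []Model           `json:"models,omitempty"`
}

func main() {
	// Trimmed, hypothetical config in the shape of anthropic.json above.
	raw := []byte(`{
		"name": "Anthropic",
		"type": "anthropic",
		"default_large_model_id": "claude-sonnet-4-20250514",
		"default_small_model_id": "claude-3-5-haiku-20241022",
		"models": [{"id": "claude-opus-4-20250514"}]
	}`)

	var p Provider
	if err := json.Unmarshal(raw, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.DefaultLargeModelID, p.DefaultSmallModelID)
}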