Detailed changes
@@ -187,7 +187,7 @@
"title": "Model ID",
"description": "Unique identifier for the model"
},
- "model": {
+ "name": {
"type": "string",
"title": "Model Name",
"description": "Display name of the model"
@@ -252,10 +252,7 @@
"type": "object",
"required": [
"id",
- "model",
- "cost_per_1m_in",
- "cost_per_1m_out",
- "cost_per_1m_in_cached",
+ "name",
"cost_per_1m_out_cached",
"context_window",
"default_max_tokens",
@@ -56,18 +56,18 @@ const (
)
type Model struct {
- ID string `json:"id" jsonschema:"title=Model ID,description=Unique identifier for the model"`
- Name string `json:"model" jsonschema:"title=Model Name,description=Display name of the model"`
- CostPer1MIn float64 `json:"cost_per_1m_in" jsonschema:"title=Input Cost,description=Cost per 1 million input tokens,minimum=0"`
- CostPer1MOut float64 `json:"cost_per_1m_out" jsonschema:"title=Output Cost,description=Cost per 1 million output tokens,minimum=0"`
- CostPer1MInCached float64 `json:"cost_per_1m_in_cached" jsonschema:"title=Cached Input Cost,description=Cost per 1 million cached input tokens,minimum=0"`
- CostPer1MOutCached float64 `json:"cost_per_1m_out_cached" jsonschema:"title=Cached Output Cost,description=Cost per 1 million cached output tokens,minimum=0"`
+ ID string `json:"id" jsonschema:"title=Model ID,description=Unique identifier for the model, the API model"`
+ Name string `json:"name" jsonschema:"title=Model Name,description=Display name of the model"`
+ CostPer1MIn float64 `json:"cost_per_1m_in,omitempty" jsonschema:"title=Input Cost,description=Cost per 1 million input tokens,minimum=0"`
+ CostPer1MOut float64 `json:"cost_per_1m_out,omitempty" jsonschema:"title=Output Cost,description=Cost per 1 million output tokens,minimum=0"`
+ CostPer1MInCached float64 `json:"cost_per_1m_in_cached,omitempty" jsonschema:"title=Cached Input Cost,description=Cost per 1 million cached input tokens,minimum=0"`
+ CostPer1MOutCached float64 `json:"cost_per_1m_out_cached,omitempty" jsonschema:"title=Cached Output Cost,description=Cost per 1 million cached output tokens,minimum=0"`
ContextWindow int64 `json:"context_window" jsonschema:"title=Context Window,description=Maximum context window size in tokens,minimum=1"`
DefaultMaxTokens int64 `json:"default_max_tokens" jsonschema:"title=Default Max Tokens,description=Default maximum tokens for responses,minimum=1"`
- CanReason bool `json:"can_reason" jsonschema:"title=Can Reason,description=Whether the model supports reasoning capabilities"`
- ReasoningEffort string `json:"reasoning_effort" jsonschema:"title=Reasoning Effort,description=Default reasoning effort level for reasoning models"`
- HasReasoningEffort bool `json:"has_reasoning_effort" jsonschema:"title=Has Reasoning Effort,description=Whether the model supports reasoning effort configuration"`
- SupportsImages bool `json:"supports_attachments" jsonschema:"title=Supports Images,description=Whether the model supports image attachments"`
+ CanReason bool `json:"can_reason,omitempty" jsonschema:"title=Can Reason,description=Whether the model supports reasoning capabilities"`
+ ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"title=Reasoning Effort,description=Default reasoning effort level for reasoning models"`
+ HasReasoningEffort bool `json:"has_reasoning_effort,omitempty" jsonschema:"title=Has Reasoning Effort,description=Whether the model supports reasoning effort configuration"`
+ SupportsImages bool `json:"supports_attachments,omitempty" jsonschema:"title=Supports Images,description=Whether the model supports image attachments"`
}
type VertexAIOptions struct {
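Taken together with the schema hunks above, this struct change renames the display-name key from "model" to "name" and marks the pricing and capability fields omitempty, so a hand-written model entry only needs the core fields. A minimal, self-contained round-trip sketch of that behavior; the struct here is a trimmed local copy of the one in the hunk (jsonschema tags dropped) and the field values are illustrative:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed local copy of the Model struct from the hunk above so the sketch
// runs on its own; the json tags match the new names and omitempty flags.
type Model struct {
	ID                 string  `json:"id"`
	Name               string  `json:"name"`
	CostPer1MIn        float64 `json:"cost_per_1m_in,omitempty"`
	CostPer1MOut       float64 `json:"cost_per_1m_out,omitempty"`
	CostPer1MInCached  float64 `json:"cost_per_1m_in_cached,omitempty"`
	CostPer1MOutCached float64 `json:"cost_per_1m_out_cached,omitempty"`
	ContextWindow      int64   `json:"context_window"`
	DefaultMaxTokens   int64   `json:"default_max_tokens"`
	CanReason          bool    `json:"can_reason,omitempty"`
	ReasoningEffort    string  `json:"reasoning_effort,omitempty"`
	HasReasoningEffort bool    `json:"has_reasoning_effort,omitempty"`
	SupportsImages     bool    `json:"supports_attachments,omitempty"`
}

func main() {
	// A minimal custom model entry using the renamed "name" key; values are illustrative.
	in := `{"id":"my-model","name":"My Model","context_window":128000,"default_max_tokens":4096}`

	var m Model
	if err := json.Unmarshal([]byte(in), &m); err != nil {
		panic(err)
	}

	// Marshaling back drops the zero-valued optional fields, so pricing and
	// capability flags never have to appear in a minimal config.
	out, _ := json.Marshal(m)
	fmt.Println(string(out))
	// {"id":"my-model","name":"My Model","context_window":128000,"default_max_tokens":4096}
}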
@@ -183,7 +183,7 @@ func (o *openaiClient) preparedParams(messages []openai.ChatCompletionMessagePar
case "high":
params.ReasoningEffort = shared.ReasoningEffortHigh
default:
- params.ReasoningEffort = shared.ReasoningEffortMedium
+ params.ReasoningEffort = shared.ReasoningEffort(reasoningEffort)
}
} else {
params.MaxTokens = openai.Int(maxTokens)
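With this change the OpenAI client no longer coerces unrecognized reasoning-effort values to medium; whatever string is configured is converted and passed through as-is. A small sketch of that default-branch behavior, using a locally defined stand-in for the SDK's shared.ReasoningEffort type so it is self-contained:

package main

import "fmt"

// Stand-in for the SDK's shared.ReasoningEffort string type; in the real
// client the switch assigns params.ReasoningEffort directly.
type ReasoningEffort string

const (
	ReasoningEffortLow    ReasoningEffort = "low"
	ReasoningEffortMedium ReasoningEffort = "medium"
	ReasoningEffortHigh   ReasoningEffort = "high"
)

// toReasoningEffort mirrors the updated switch: known levels map to the
// named constants, and anything else is forwarded unchanged instead of
// being silently replaced with "medium".
func toReasoningEffort(configured string) ReasoningEffort {
	switch configured {
	case "low":
		return ReasoningEffortLow
	case "medium":
		return ReasoningEffortMedium
	case "high":
		return ReasoningEffortHigh
	default:
		return ReasoningEffort(configured) // e.g. a provider-specific value from config
	}
}

func main() {
	fmt.Println(toReasoningEffort("high"))    // high
	fmt.Println(toReasoningEffort("minimal")) // minimal (previously forced to medium)
}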
@@ -1,6 +1,8 @@
package models
import (
+ "slices"
+
"github.com/charmbracelet/bubbles/v2/help"
"github.com/charmbracelet/bubbles/v2/key"
tea "github.com/charmbracelet/bubbletea/v2"
@@ -228,7 +230,75 @@ func (m *modelDialogCmp) SetModelType(modelType int) tea.Cmd {
currentModel = cfg.Models.Small
}
+ // Create a map to track which providers we've already added
+ addedProviders := make(map[provider.InferenceProvider]bool)
+
+ // First, add any configured providers that are not in the known providers list
+ // These should appear at the top of the list
+ knownProviders := provider.KnownProviders()
+ for providerID, providerConfig := range cfg.Providers {
+ if providerConfig.Disabled {
+ continue
+ }
+
+ // Check if this provider is not in the known providers list
+ if !slices.Contains(knownProviders, providerID) {
+ // Convert config provider to provider.Provider format
+ configProvider := provider.Provider{
+ Name: string(providerID), // Use provider ID as name for unknown providers
+ ID: providerID,
+ Models: make([]provider.Model, len(providerConfig.Models)),
+ }
+
+ // Convert models
+ for i, model := range providerConfig.Models {
+ configProvider.Models[i] = provider.Model{
+ ID: model.ID,
+ Name: model.Name,
+ CostPer1MIn: model.CostPer1MIn,
+ CostPer1MOut: model.CostPer1MOut,
+ CostPer1MInCached: model.CostPer1MInCached,
+ CostPer1MOutCached: model.CostPer1MOutCached,
+ ContextWindow: model.ContextWindow,
+ DefaultMaxTokens: model.DefaultMaxTokens,
+ CanReason: model.CanReason,
+ HasReasoningEffort: model.HasReasoningEffort,
+ DefaultReasoningEffort: model.ReasoningEffort,
+ SupportsImages: model.SupportsImages,
+ }
+ }
+
+ // Add this unknown provider to the list
+ name := configProvider.Name
+ if name == "" {
+ name = string(configProvider.ID)
+ }
+ modelItems = append(modelItems, commands.NewItemSection(name))
+ for _, model := range configProvider.Models {
+ modelItems = append(modelItems, completions.NewCompletionItem(model.Name, ModelOption{
+ Provider: configProvider,
+ Model: model,
+ }))
+ if model.ID == currentModel.ModelID && configProvider.ID == currentModel.Provider {
+ selectIndex = len(modelItems) - 1 // Set the selected index to the current model
+ }
+ }
+ addedProviders[providerID] = true
+ }
+ }
+
+ // Then add the known providers from the predefined list
for _, provider := range providers {
+ // Skip if we already added this provider as an unknown provider
+ if addedProviders[provider.ID] {
+ continue
+ }
+
+ // Check if this provider is configured and not disabled
+ if providerConfig, exists := cfg.Providers[provider.ID]; exists && providerConfig.Disabled {
+ continue
+ }
+
name := provider.Name
if name == "" {
name = string(provider.ID)
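The dialog now lists configured-but-unknown providers ahead of the built-in ones and drops disabled providers in both passes. A condensed sketch of that two-pass ordering with the dialog and config types reduced to the essentials (the provider names in the example are made up):

package main

import (
	"fmt"
	"slices"
)

// Minimal stand-in for the per-provider config entries used by the dialog.
type providerConfig struct {
	Disabled bool
}

// orderProviders sketches the listing order used above: configured providers
// that are not in the known list come first, then the known providers, with
// disabled providers skipped in both passes.
func orderProviders(known []string, configured map[string]providerConfig) []string {
	added := make(map[string]bool)
	var out []string

	// Pass 1: unknown-but-configured providers go to the top of the list.
	for id, cfg := range configured {
		if cfg.Disabled || slices.Contains(known, id) {
			continue
		}
		out = append(out, id)
		added[id] = true
	}

	// Pass 2: known providers, skipping any already added or disabled.
	for _, id := range known {
		if added[id] {
			continue
		}
		if cfg, ok := configured[id]; ok && cfg.Disabled {
			continue
		}
		out = append(out, id)
	}
	return out
}

func main() {
	known := []string{"openai", "anthropic"}
	configured := map[string]providerConfig{
		"my-local-proxy": {},               // custom provider, listed first
		"anthropic":      {Disabled: true}, // disabled, dropped entirely
	}
	fmt.Println(orderProviders(known, configured)) // [my-local-proxy openai]
}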