Detailed changes
@@ -1,6 +1,7 @@
package ai

type Provider interface {
+ Name() string
LanguageModel(modelID string) (LanguageModel, error)
- // TODO: add other model types when needed
+ ParseOptions(data map[string]any) (ProviderOptionsData, error)
}
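
Usage sketch (outside the patch): the two new methods let a caller decode untyped config into a provider's typed options and key them by the provider's name. The helper below is illustrative; only ai.Provider, ai.Call, and ai.ProviderOptions come from the changes above.

package example

import (
	"fmt"

	"github.com/charmbracelet/fantasy/ai"
)

// attachOptions is a hypothetical helper: it decodes raw options through the
// provider and stores the result under the provider's name, the same key each
// provider's prepareParams later looks up.
func attachOptions(p ai.Provider, raw map[string]any) (ai.Call, error) {
	parsed, err := p.ParseOptions(raw)
	if err != nil {
		return ai.Call{}, fmt.Errorf("parsing %s options: %w", p.Name(), err)
	}
	return ai.Call{
		ProviderOptions: ai.ProviderOptions{p.Name(): parsed},
	}, nil
}
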
@@ -1,5 +1,7 @@
package ai

+import "github.com/go-viper/mapstructure/v2"
+
func FloatOption(f float64) *float64 {
return &f
}
@@ -15,3 +17,14 @@ func StringOption(s string) *string {
func IntOption(i int64) *int64 {
return &i
}
+
+func ParseOptions[T any](options map[string]any, m *T) error {
+ decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
+ TagName: "json",
+ Result: m,
+ })
+ if err != nil {
+ return err
+ }
+ return decoder.Decode(options)
+}
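
Illustrative decode (outside the patch): the decoder is configured with TagName: "json", so map keys are matched against the struct's json tags, including pointer fields. The sampleOptions struct below is made up for the example.

package example

import (
	"fmt"

	"github.com/charmbracelet/fantasy/ai"
)

// sampleOptions is a made-up struct; only its json tags matter here, because
// the decoder matches map keys against json tags.
type sampleOptions struct {
	SendReasoning *bool  `json:"send_reasoning"`
	User          string `json:"user"`
}

func decodeSample() error {
	raw := map[string]any{
		"send_reasoning": true,
		"user":           "demo",
	}
	var opts sampleOptions
	if err := ai.ParseOptions(raw, &opts); err != nil {
		return err
	}
	fmt.Println(*opts.SendReasoning, opts.User) // prints: true demo
	return nil
}
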
@@ -123,7 +123,7 @@ func (a languageModel) Provider() string {
func (a languageModel) prepareParams(call ai.Call) (*anthropic.MessageNewParams, []ai.CallWarning, error) {
params := &anthropic.MessageNewParams{}
providerOptions := &ProviderOptions{}
- if v, ok := call.ProviderOptions[OptionsKey]; ok {
+ if v, ok := call.ProviderOptions[Name]; ok {
providerOptions, ok = v.(*ProviderOptions)
if !ok {
return nil, nil, ai.NewInvalidArgumentError("providerOptions", "anthropic provider options should be *anthropic.ProviderOptions", nil)
@@ -221,8 +221,20 @@ func (a languageModel) prepareParams(call ai.Call) (*anthropic.MessageNewParams,
return params, warnings, nil
}

+func (a *provider) ParseOptions(data map[string]any) (ai.ProviderOptionsData, error) {
+ var options ProviderOptions
+ if err := ai.ParseOptions(data, &options); err != nil {
+ return nil, err
+ }
+ return &options, nil
+}
+
+func (a *provider) Name() string {
+ return Name
+}
+
func getCacheControl(providerOptions ai.ProviderOptions) *CacheControl {
- if anthropicOptions, ok := providerOptions[OptionsKey]; ok {
+ if anthropicOptions, ok := providerOptions[Name]; ok {
if options, ok := anthropicOptions.(*ProviderCacheControlOptions); ok {
return &options.CacheControl
}
@@ -231,7 +243,7 @@ func getCacheControl(providerOptions ai.ProviderOptions) *CacheControl {
}

func getReasoningMetadata(providerOptions ai.ProviderOptions) *ReasoningOptionMetadata {
- if anthropicOptions, ok := providerOptions[OptionsKey]; ok {
+ if anthropicOptions, ok := providerOptions[Name]; ok {
if reasoning, ok := anthropicOptions.(*ReasoningOptionMetadata); ok {
return reasoning
}
@@ -664,7 +676,7 @@ func (a languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response
content = append(content, ai.ReasoningContent{
Text: reasoning.Thinking,
ProviderMetadata: ai.ProviderMetadata{
- OptionsKey: &ReasoningOptionMetadata{
+ Name: &ReasoningOptionMetadata{
Signature: reasoning.Signature,
},
},
@@ -677,7 +689,7 @@ func (a languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response
content = append(content, ai.ReasoningContent{
Text: "",
ProviderMetadata: ai.ProviderMetadata{
- OptionsKey: &ReasoningOptionMetadata{
+ Name: &ReasoningOptionMetadata{
RedactedData: reasoning.Data,
},
},
@@ -756,7 +768,7 @@ func (a languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
Type: ai.StreamPartTypeReasoningStart,
ID: fmt.Sprintf("%d", chunk.Index),
ProviderMetadata: ai.ProviderMetadata{
- OptionsKey: &ReasoningOptionMetadata{
+ Name: &ReasoningOptionMetadata{
RedactedData: chunk.ContentBlock.Data,
},
},
@@ -832,7 +844,7 @@ func (a languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
Type: ai.StreamPartTypeReasoningDelta,
ID: fmt.Sprintf("%d", chunk.Index),
ProviderMetadata: ai.ProviderMetadata{
- OptionsKey: &ReasoningOptionMetadata{
+ Name: &ReasoningOptionMetadata{
Signature: chunk.Delta.Signature,
},
},
@@ -2,7 +2,7 @@ package anthropic
import "github.com/charmbracelet/fantasy/ai"

-const OptionsKey = "anthropic"
+const Name = "anthropic"

type ProviderOptions struct {
SendReasoning *bool `json:"send_reasoning"`
@@ -35,12 +35,12 @@ type CacheControl struct {
func NewProviderOptions(opts *ProviderOptions) ai.ProviderOptions {
return ai.ProviderOptions{
- OptionsKey: opts,
+ Name: opts,
}
}

func NewProviderCacheControlOptions(opts *ProviderCacheControlOptions) ai.ProviderOptions {
return ai.ProviderOptions{
- OptionsKey: opts,
+ Name: opts,
}
}
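
Caller-side sketch (outside the patch): NewProviderOptions keys the typed options under anthropic.Name, the same "anthropic" key prepareParams reads back; the anthropic import path is assumed.

package example

import (
	"github.com/charmbracelet/fantasy/ai"
	"github.com/charmbracelet/fantasy/anthropic" // import path assumed
)

func callWithReasoning() ai.Call {
	send := true
	return ai.Call{
		// Stored under anthropic.Name ("anthropic").
		ProviderOptions: anthropic.NewProviderOptions(&anthropic.ProviderOptions{
			SendReasoning: &send,
		}),
	}
}
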
@@ -5,6 +5,7 @@ go 1.24.5
require (
github.com/anthropics/anthropic-sdk-go v1.10.0
github.com/charmbracelet/x/json v0.2.0
+ github.com/go-viper/mapstructure/v2 v2.4.0
github.com/google/uuid v1.6.0
github.com/joho/godotenv v1.5.1
github.com/openai/openai-go/v2 v2.3.0
@@ -4,6 +4,8 @@ github.com/charmbracelet/x/json v0.2.0 h1:DqB+ZGx2h+Z+1s98HOuOyli+i97wsFQIxP2ZQA
github.com/charmbracelet/x/json v0.2.0/go.mod h1:opFIflx2YgXgi49xVUu8gEQ21teFAxyMwvOiZhIvWNM=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
+github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
@@ -151,7 +151,7 @@ func (o languageModel) prepareParams(call ai.Call) (*openai.ChatCompletionNewPar
params := &openai.ChatCompletionNewParams{}
messages, warnings := toPrompt(call.Prompt)
providerOptions := &ProviderOptions{}
- if v, ok := call.ProviderOptions[OptionsKey]; ok {
+ if v, ok := call.ProviderOptions[Name]; ok {
providerOptions, ok = v.(*ProviderOptions)
if !ok {
return nil, nil, ai.NewInvalidArgumentError("providerOptions", "openai provider options should be *openai.ProviderOptions", nil)
@@ -471,7 +471,7 @@ func (o languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response
},
FinishReason: mapOpenAiFinishReason(choice.FinishReason),
ProviderMetadata: ai.ProviderMetadata{
- OptionsKey: providerMetadata,
+ Name: providerMetadata,
},
Warnings: warnings,
}, nil
@@ -733,7 +733,7 @@ func (o languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
Usage: usage,
FinishReason: finishReason,
ProviderMetadata: ai.ProviderMetadata{
- OptionsKey: streamProviderMetadata,
+ Name: streamProviderMetadata,
},
})
return
@@ -747,6 +747,18 @@ func (o languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
}, nil
}

+func (o *provider) ParseOptions(data map[string]any) (ai.ProviderOptionsData, error) {
+ var options ProviderOptions
+ if err := ai.ParseOptions(data, &options); err != nil {
+ return nil, err
+ }
+ return &options, nil
+}
+
+func (o *provider) Name() string {
+ return Name
+}
+
func mapOpenAiFinishReason(finishReason string) ai.FinishReason {
switch finishReason {
case "stop":
@@ -923,7 +935,7 @@ func toPrompt(prompt ai.Prompt) ([]openai.ChatCompletionMessageParamUnion, []ai.
imageURL := openai.ChatCompletionContentPartImageImageURLParam{URL: data}
// Check for provider-specific options like image detail
- if providerOptions, ok := filePart.ProviderOptions[OptionsKey]; ok {
+ if providerOptions, ok := filePart.ProviderOptions[Name]; ok {
if detail, ok := providerOptions.(*ProviderFileOptions); ok {
imageURL.Detail = detail.ImageDetail
}
@@ -5,7 +5,7 @@ import (
"github.com/openai/openai-go/v2"
)

-const OptionsKey = "openai"
+const Name = "openai"

type ReasoningEffort string
@@ -56,12 +56,12 @@ func ReasoningEffortOption(e ReasoningEffort) *ReasoningEffort {
func NewProviderOptions(opts *ProviderOptions) ai.ProviderOptions {
return ai.ProviderOptions{
- OptionsKey: opts,
+ Name: opts,
}
}

func NewProviderFileOptions(opts *ProviderFileOptions) ai.ProviderOptions {
return ai.ProviderOptions{
- OptionsKey: opts,
+ Name: opts,
}
}