@@ -123,7 +123,7 @@ func (a languageModel) Provider() string {
func (a languageModel) prepareParams(call ai.Call) (*anthropic.MessageNewParams, []ai.CallWarning, error) {
params := &anthropic.MessageNewParams{}
providerOptions := &ProviderOptions{}
- if v, ok := call.ProviderOptions[ProviderOptionsKey]; ok {
+ if v, ok := call.ProviderOptions[OptionsKey]; ok {
providerOptions, ok = v.(*ProviderOptions)
if !ok {
return nil, nil, ai.NewInvalidArgumentError("providerOptions", "anthropic provider options should be *anthropic.ProviderOptions", nil)
@@ -222,7 +222,7 @@ func (a languageModel) prepareParams(call ai.Call) (*anthropic.MessageNewParams,
}

func getCacheControl(providerOptions ai.ProviderOptions) *CacheControl {
- if anthropicOptions, ok := providerOptions[ProviderOptionsKey]; ok {
+ if anthropicOptions, ok := providerOptions[OptionsKey]; ok {
if options, ok := anthropicOptions.(*ProviderCacheControlOptions); ok {
return &options.CacheControl
}
@@ -231,7 +231,7 @@ func getCacheControl(providerOptions ai.ProviderOptions) *CacheControl {
}

func getReasoningMetadata(providerOptions ai.ProviderOptions) *ReasoningOptionMetadata {
- if anthropicOptions, ok := providerOptions[ProviderOptionsKey]; ok {
+ if anthropicOptions, ok := providerOptions[OptionsKey]; ok {
if reasoning, ok := anthropicOptions.(*ReasoningOptionMetadata); ok {
return reasoning
}
@@ -664,7 +664,7 @@ func (a languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response
content = append(content, ai.ReasoningContent{
Text: reasoning.Thinking,
ProviderMetadata: ai.ProviderMetadata{
- ProviderOptionsKey: &ReasoningOptionMetadata{
+ OptionsKey: &ReasoningOptionMetadata{
Signature: reasoning.Signature,
},
},
@@ -677,7 +677,7 @@ func (a languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response
content = append(content, ai.ReasoningContent{
Text: "",
ProviderMetadata: ai.ProviderMetadata{
- ProviderOptionsKey: &ReasoningOptionMetadata{
+ OptionsKey: &ReasoningOptionMetadata{
RedactedData: reasoning.Data,
},
},
@@ -756,7 +756,7 @@ func (a languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
Type: ai.StreamPartTypeReasoningStart,
ID: fmt.Sprintf("%d", chunk.Index),
ProviderMetadata: ai.ProviderMetadata{
- ProviderOptionsKey: &ReasoningOptionMetadata{
+ OptionsKey: &ReasoningOptionMetadata{
RedactedData: chunk.ContentBlock.Data,
},
},
@@ -832,7 +832,7 @@ func (a languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
Type: ai.StreamPartTypeReasoningDelta,
ID: fmt.Sprintf("%d", chunk.Index),
ProviderMetadata: ai.ProviderMetadata{
- ProviderOptionsKey: &ReasoningOptionMetadata{
+ OptionsKey: &ReasoningOptionMetadata{
Signature: chunk.Delta.Signature,
},
},
@@ -2,7 +2,7 @@ package anthropic

import "github.com/charmbracelet/ai/ai"

-const ProviderOptionsKey = "anthropic"
+const OptionsKey = "anthropic"

type ProviderOptions struct {
SendReasoning *bool
@@ -35,12 +35,12 @@ type CacheControl struct {

func NewProviderOptions(opts *ProviderOptions) ai.ProviderOptions {
return ai.ProviderOptions{
- ProviderOptionsKey: opts,
+ OptionsKey: opts,
}
}

func NewProviderCacheControlOptions(opts *ProviderCacheControlOptions) ai.ProviderOptions {
return ai.ProviderOptions{
- ProviderOptionsKey: opts,
+ OptionsKey: opts,
}
}
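For context, the caller-facing shape after this rename would look roughly like the sketch below. It is not part of the diff: the provider import path and the ai.Call construction are assumptions, while OptionsKey, NewProviderOptions, and the SendReasoning field come from the hunks above.

	package main

	import (
		"github.com/charmbracelet/ai/ai"
		// Import path is assumed for illustration only.
		anthropic "github.com/charmbracelet/ai/anthropic"
	)

	func buildCall() ai.Call {
		sendReasoning := true
		return ai.Call{
			// NewProviderOptions stores the options under the renamed
			// anthropic.OptionsKey (previously anthropic.ProviderOptionsKey).
			ProviderOptions: anthropic.NewProviderOptions(&anthropic.ProviderOptions{
				SendReasoning: &sendReasoning,
			}),
		}
	}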
@@ -151,7 +151,7 @@ func (o languageModel) prepareParams(call ai.Call) (*openai.ChatCompletionNewPar
params := &openai.ChatCompletionNewParams{}
messages, warnings := toPrompt(call.Prompt)
providerOptions := &ProviderOptions{}
- if v, ok := call.ProviderOptions[ProviderOptionsKey]; ok {
+ if v, ok := call.ProviderOptions[OptionsKey]; ok {
providerOptions, ok = v.(*ProviderOptions)
if !ok {
return nil, nil, ai.NewInvalidArgumentError("providerOptions", "openai provider options should be *openai.ProviderOptions", nil)
@@ -471,7 +471,7 @@ func (o languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response
},
FinishReason: mapOpenAiFinishReason(choice.FinishReason),
ProviderMetadata: ai.ProviderMetadata{
- ProviderOptionsKey: providerMetadata,
+ OptionsKey: providerMetadata,
},
Warnings: warnings,
}, nil
@@ -733,7 +733,7 @@ func (o languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
Usage: usage,
FinishReason: finishReason,
ProviderMetadata: ai.ProviderMetadata{
- ProviderOptionsKey: streamProviderMetadata,
+ OptionsKey: streamProviderMetadata,
},
})
return
@@ -923,7 +923,7 @@ func toPrompt(prompt ai.Prompt) ([]openai.ChatCompletionMessageParamUnion, []ai.
imageURL := openai.ChatCompletionContentPartImageImageURLParam{URL: data}

// Check for provider-specific options like image detail
- if providerOptions, ok := filePart.ProviderOptions[ProviderOptionsKey]; ok {
+ if providerOptions, ok := filePart.ProviderOptions[OptionsKey]; ok {
if detail, ok := providerOptions.(*ProviderFileOptions); ok {
imageURL.Detail = detail.ImageDetail
}
@@ -5,7 +5,7 @@ import (
"github.com/openai/openai-go/v2"
)

-const ProviderOptionsKey = "openai"
+const OptionsKey = "openai"

type ReasoningEffort string
@@ -56,12 +56,12 @@ func ReasoningEffortOption(e ReasoningEffort) *ReasoningEffort {
func NewProviderOptions(opts *ProviderOptions) ai.ProviderOptions {
return ai.ProviderOptions{
- ProviderOptionsKey: opts,
+ OptionsKey: opts,
}
}
func NewProviderFileOptions(opts *ProviderFileOptions) ai.ProviderOptions {
return ai.ProviderOptions{
- ProviderOptionsKey: opts,
+ OptionsKey: opts,
}
}
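On the OpenAI side, consumer code reading response metadata would switch to the new constant in the same way. A minimal sketch follows, again not part of the diff: the provider import path is assumed, while OptionsKey and the ProviderMetadata field on *ai.Response mirror the Generate hunk above.

	package main

	import (
		"fmt"

		"github.com/charmbracelet/ai/ai"
		// Import path is assumed for illustration only.
		openai "github.com/charmbracelet/ai/openai"
	)

	// inspectMetadata looks up the OpenAI provider's response metadata under the
	// renamed openai.OptionsKey instead of the old openai.ProviderOptionsKey.
	func inspectMetadata(resp *ai.Response) {
		if meta, ok := resp.ProviderMetadata[openai.OptionsKey]; ok {
			fmt.Printf("openai provider metadata: %#v\n", meta)
		}
	}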