From 2058c736d88c8a108a9dec8d75fff3ffc54621d4 Mon Sep 17 00:00:00 2001
From: kujtimiihoxha
Date: Fri, 12 Sep 2025 14:15:00 +0200
Subject: [PATCH] chore: add json tags to provider options

---
 anthropic/provider_options.go | 16 ++++++++--------
 openai/provider_options.go    | 38 +++++++++++++++++++-------------------
 2 files changed, 27 insertions(+), 27 deletions(-)

diff --git a/anthropic/provider_options.go b/anthropic/provider_options.go
index 54bcc1b968e6f27e8cc7f8f5e1a222c13895f891..f568b486269f8747eeaab1cd0b87769a3163aea5 100644
--- a/anthropic/provider_options.go
+++ b/anthropic/provider_options.go
@@ -5,32 +5,32 @@ import "github.com/charmbracelet/fantasy/ai"
 const OptionsKey = "anthropic"
 
 type ProviderOptions struct {
-	SendReasoning          *bool
-	Thinking               *ThinkingProviderOption
-	DisableParallelToolUse *bool
+	SendReasoning          *bool                   `json:"send_reasoning"`
+	Thinking               *ThinkingProviderOption `json:"thinking"`
+	DisableParallelToolUse *bool                   `json:"disable_parallel_tool_use"`
 }
 
 func (o *ProviderOptions) Options() {}
 
 type ThinkingProviderOption struct {
-	BudgetTokens int64
+	BudgetTokens int64 `json:"budget_tokens"`
 }
 
 type ReasoningOptionMetadata struct {
-	Signature    string
-	RedactedData string
+	Signature    string `json:"signature"`
+	RedactedData string `json:"redacted_data"`
 }
 
 func (*ReasoningOptionMetadata) Options() {}
 
 type ProviderCacheControlOptions struct {
-	CacheControl CacheControl
+	CacheControl CacheControl `json:"cache_control"`
 }
 
 func (*ProviderCacheControlOptions) Options() {}
 
 type CacheControl struct {
-	Type string
+	Type string `json:"type"`
 }
 
 func NewProviderOptions(opts *ProviderOptions) ai.ProviderOptions {
diff --git a/openai/provider_options.go b/openai/provider_options.go
index 635b87384835c6b0e53e52d8c9443c09154a3b66..b8b3e85e430aca8f91c110857fd7846c477d4bf4 100644
--- a/openai/provider_options.go
+++ b/openai/provider_options.go
@@ -17,35 +17,35 @@ const (
 )
 
 type ProviderMetadata struct {
-	Logprobs                 []openai.ChatCompletionTokenLogprob
-	AcceptedPredictionTokens int64
-	RejectedPredictionTokens int64
+	Logprobs                 []openai.ChatCompletionTokenLogprob `json:"logprobs"`
+	AcceptedPredictionTokens int64                               `json:"accepted_prediction_tokens"`
+	RejectedPredictionTokens int64                               `json:"rejected_prediction_tokens"`
 }
 
 func (*ProviderMetadata) Options() {}
 
 type ProviderOptions struct {
-	LogitBias           map[string]int64
-	LogProbs            *bool
-	TopLogProbs         *int64
-	ParallelToolCalls   *bool
-	User                *string
-	ReasoningEffort     *ReasoningEffort
-	MaxCompletionTokens *int64
-	TextVerbosity       *string
-	Prediction          map[string]any
-	Store               *bool
-	Metadata            map[string]any
-	PromptCacheKey      *string
-	SafetyIdentifier    *string
-	ServiceTier         *string
-	StructuredOutputs   *bool
+	LogitBias           map[string]int64 `json:"logit_bias"`
+	LogProbs            *bool            `json:"log_probs"`
+	TopLogProbs         *int64           `json:"top_log_probs"`
+	ParallelToolCalls   *bool            `json:"parallel_tool_calls"`
+	User                *string          `json:"user"`
+	ReasoningEffort     *ReasoningEffort `json:"reasoning_effort"`
+	MaxCompletionTokens *int64           `json:"max_completion_tokens"`
+	TextVerbosity       *string          `json:"text_verbosity"`
+	Prediction          map[string]any   `json:"prediction"`
+	Store               *bool            `json:"store"`
+	Metadata            map[string]any   `json:"metadata"`
+	PromptCacheKey      *string          `json:"prompt_cache_key"`
+	SafetyIdentifier    *string          `json:"safety_identifier"`
+	ServiceTier         *string          `json:"service_tier"`
+	StructuredOutputs   *bool            `json:"structured_outputs"`
 }
 
 func (*ProviderOptions) Options() {}
 
 type ProviderFileOptions struct {
-	ImageDetail string
+	ImageDetail string `json:"image_detail"`
 }
 
 func (*ProviderFileOptions) Options() {}