chore: rename OpenAI* identifiers to OpenAi* casing in internal/ai providers and examples

Created by Kujtim Hoxha

Change summary

internal/ai/examples/agent/main.go                  |   4 
internal/ai/examples/stream/main.go                 |   2 
internal/ai/examples/streaming-agent-simple/main.go |   4 
internal/ai/providers/openai.go                     |  92 ++--
internal/ai/providers/openai_test.go                | 304 +++++++-------
5 files changed, 203 insertions(+), 203 deletions(-)

Detailed changes

internal/ai/examples/agent/main.go 🔗

@@ -10,8 +10,8 @@ import (
 )
 
 func main() {
-	provider := providers.NewOpenAIProvider(
-		providers.WithOpenAIApiKey(os.Getenv("OPENAI_API_KEY")),
+	provider := providers.NewOpenAiProvider(
+		providers.WithOpenAiAPIKey(os.Getenv("OPENAI_API_KEY")),
 	)
 	model, err := provider.LanguageModel("gpt-4o")
 	if err != nil {

internal/ai/examples/stream/main.go 🔗

@@ -11,7 +11,7 @@ import (
 )
 
 func main() {
-	provider := providers.NewOpenAIProvider(providers.WithOpenAIApiKey(os.Getenv("OPENAI_API_KEY")))
+	provider := providers.NewOpenAiProvider(providers.WithOpenAiAPIKey(os.Getenv("OPENAI_API_KEY")))
 	model, err := provider.LanguageModel("gpt-4o")
 	if err != nil {
 		fmt.Println(err)

internal/ai/examples/streaming-agent-simple/main.go 🔗

@@ -18,8 +18,8 @@ func main() {
 	}
 
 	// Create provider and model
-	provider := providers.NewOpenAIProvider(
-		providers.WithOpenAIApiKey(apiKey),
+	provider := providers.NewOpenAiProvider(
+		providers.WithOpenAiAPIKey(apiKey),
 	)
 	model, err := provider.LanguageModel("gpt-4o-mini")
 	if err != nil {

internal/ai/providers/openai.go 🔗

@@ -27,7 +27,7 @@ const (
 	ReasoningEffortHigh    ReasoningEffort = "high"
 )
 
-type OpenAIProviderOptions struct {
+type OpenAiProviderOptions struct {
 	LogitBias           map[string]int64 `json:"logit_bias"`
 	LogProbs            *bool            `json:"log_probes"`
 	TopLogProbs         *int64           `json:"top_log_probs"`
@@ -45,11 +45,11 @@ type OpenAIProviderOptions struct {
 	StructuredOutputs   *bool            `json:"structured_outputs"`
 }
 
-type openAIProvider struct {
-	options openAIProviderOptions
+type openAiProvider struct {
+	options openAiProviderOptions
 }
 
-type openAIProviderOptions struct {
+type openAiProviderOptions struct {
 	baseURL      string
 	apiKey       string
 	organization string
@@ -59,10 +59,10 @@ type openAIProviderOptions struct {
 	client       option.HTTPClient
 }
 
-type OpenAIOption = func(*openAIProviderOptions)
+type OpenAiOption = func(*openAiProviderOptions)
 
-func NewOpenAIProvider(opts ...OpenAIOption) ai.Provider {
-	options := openAIProviderOptions{
+func NewOpenAiProvider(opts ...OpenAiOption) ai.Provider {
+	options := openAiProviderOptions{
 		headers: map[string]string{},
 	}
 	for _, o := range opts {
@@ -78,62 +78,62 @@ func NewOpenAIProvider(opts ...OpenAIOption) ai.Provider {
 	}
 
 	if options.organization != "" {
-		options.headers["OpenAI-Organization"] = options.organization
+		options.headers["OpenAI-Organization"] = options.organization
 	}
 
 	if options.project != "" {
-		options.headers["OpenAI-Project"] = options.project
+		options.headers["OpenAI-Project"] = options.project
 	}
 
-	return &openAIProvider{
+	return &openAiProvider{
 		options: options,
 	}
 }
 
-func WithOpenAIBaseURL(baseURL string) OpenAIOption {
-	return func(o *openAIProviderOptions) {
+func WithOpenAiBaseURL(baseURL string) OpenAiOption {
+	return func(o *openAiProviderOptions) {
 		o.baseURL = baseURL
 	}
 }
 
-func WithOpenAIApiKey(apiKey string) OpenAIOption {
-	return func(o *openAIProviderOptions) {
+func WithOpenAiAPIKey(apiKey string) OpenAiOption {
+	return func(o *openAiProviderOptions) {
 		o.apiKey = apiKey
 	}
 }
 
-func WithOpenAIOrganization(organization string) OpenAIOption {
-	return func(o *openAIProviderOptions) {
+func WithOpenAiOrganization(organization string) OpenAiOption {
+	return func(o *openAiProviderOptions) {
 		o.organization = organization
 	}
 }
 
-func WithOpenAIProject(project string) OpenAIOption {
-	return func(o *openAIProviderOptions) {
+func WithOpenAiProject(project string) OpenAiOption {
+	return func(o *openAiProviderOptions) {
 		o.project = project
 	}
 }
 
-func WithOpenAIName(name string) OpenAIOption {
-	return func(o *openAIProviderOptions) {
+func WithOpenAiName(name string) OpenAiOption {
+	return func(o *openAiProviderOptions) {
 		o.name = name
 	}
 }
 
-func WithOpenAIHeaders(headers map[string]string) OpenAIOption {
-	return func(o *openAIProviderOptions) {
+func WithOpenAiHeaders(headers map[string]string) OpenAiOption {
+	return func(o *openAiProviderOptions) {
 		maps.Copy(o.headers, headers)
 	}
 }
 
-func WithOpenAIHttpClient(client option.HTTPClient) OpenAIOption {
-	return func(o *openAIProviderOptions) {
+func WithOpenAiHTTPClient(client option.HTTPClient) OpenAiOption {
+	return func(o *openAiProviderOptions) {
 		o.client = client
 	}
 }
 
 // LanguageModel implements ai.Provider.
-func (o *openAIProvider) LanguageModel(modelID string) (ai.LanguageModel, error) {
+func (o *openAiProvider) LanguageModel(modelID string) (ai.LanguageModel, error) {
 	openaiClientOptions := []option.RequestOption{}
 	if o.options.apiKey != "" {
 		openaiClientOptions = append(openaiClientOptions, option.WithAPIKey(o.options.apiKey))
@@ -150,7 +150,7 @@ func (o *openAIProvider) LanguageModel(modelID string) (ai.LanguageModel, error)
 		openaiClientOptions = append(openaiClientOptions, option.WithHTTPClient(o.options.client))
 	}
 
-	return openAILanguageModel{
+	return openAiLanguageModel{
 		modelID:         modelID,
 		provider:        fmt.Sprintf("%s.chat", o.options.name),
 		providerOptions: o.options,
@@ -158,27 +158,27 @@ func (o *openAIProvider) LanguageModel(modelID string) (ai.LanguageModel, error)
 	}, nil
 }
 
-type openAILanguageModel struct {
+type openAiLanguageModel struct {
 	provider        string
 	modelID         string
 	client          openai.Client
-	providerOptions openAIProviderOptions
+	providerOptions openAiProviderOptions
 }
 
 // Model implements ai.LanguageModel.
-func (o openAILanguageModel) Model() string {
+func (o openAiLanguageModel) Model() string {
 	return o.modelID
 }
 
 // Provider implements ai.LanguageModel.
-func (o openAILanguageModel) Provider() string {
+func (o openAiLanguageModel) Provider() string {
 	return o.provider
 }
 
-func (o openAILanguageModel) prepareParams(call ai.Call) (*openai.ChatCompletionNewParams, []ai.CallWarning, error) {
+func (o openAiLanguageModel) prepareParams(call ai.Call) (*openai.ChatCompletionNewParams, []ai.CallWarning, error) {
 	params := &openai.ChatCompletionNewParams{}
-	messages, warnings := toOpenAIPrompt(call.Prompt)
-	providerOptions := &OpenAIProviderOptions{}
+	messages, warnings := toOpenAiPrompt(call.Prompt)
+	providerOptions := &OpenAiProviderOptions{}
 	if v, ok := call.ProviderOptions["openai"]; ok {
 		err := ai.ParseOptions(v, providerOptions)
 		if err != nil {
@@ -387,7 +387,7 @@ func (o openAILanguageModel) prepareParams(call ai.Call) (*openai.ChatCompletion
 	}
 
 	if len(call.Tools) > 0 {
-		tools, toolChoice, toolWarnings := toOpenAITools(call.Tools, call.ToolChoice)
+		tools, toolChoice, toolWarnings := toOpenAiTools(call.Tools, call.ToolChoice)
 		params.Tools = tools
 		if toolChoice != nil {
 			params.ToolChoice = *toolChoice
@@ -397,7 +397,7 @@ func (o openAILanguageModel) prepareParams(call ai.Call) (*openai.ChatCompletion
 	return params, warnings, nil
 }
 
-func (o openAILanguageModel) handleError(err error) error {
+func (o openAiLanguageModel) handleError(err error) error {
 	var apiErr *openai.Error
 	if errors.As(err, &apiErr) {
 		requestDump := apiErr.DumpRequest(true)
@@ -422,7 +422,7 @@ func (o openAILanguageModel) handleError(err error) error {
 }
 
 // Generate implements ai.LanguageModel.
-func (o openAILanguageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response, error) {
+func (o openAiLanguageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response, error) {
 	params, warnings, err := o.prepareParams(call)
 	if err != nil {
 		return nil, err
@@ -500,7 +500,7 @@ func (o openAILanguageModel) Generate(ctx context.Context, call ai.Call) (*ai.Re
 			ReasoningTokens: completionTokenDetails.ReasoningTokens,
 			CacheReadTokens: promptTokenDetails.CachedTokens,
 		},
-		FinishReason:     mapOpenAIFinishReason(choice.FinishReason),
+		FinishReason:     mapOpenAiFinishReason(choice.FinishReason),
 		ProviderMetadata: providerMetadata,
 		Warnings:         warnings,
 	}, nil
@@ -514,7 +514,7 @@ type toolCall struct {
 }
 
 // Stream implements ai.LanguageModel.
-func (o openAILanguageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamResponse, error) {
+func (o openAiLanguageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamResponse, error) {
 	params, warnings, err := o.prepareParams(call)
 	if err != nil {
 		return nil, err
@@ -759,7 +759,7 @@ func (o openAILanguageModel) Stream(ctx context.Context, call ai.Call) (ai.Strea
 				}
 			}
 
-			finishReason := mapOpenAIFinishReason(acc.Choices[0].FinishReason)
+			finishReason := mapOpenAiFinishReason(acc.Choices[0].FinishReason)
 			yield(ai.StreamPart{
 				Type:             ai.StreamPartTypeFinish,
 				Usage:            usage,
@@ -777,7 +777,7 @@ func (o openAILanguageModel) Stream(ctx context.Context, call ai.Call) (ai.Strea
 	}, nil
 }
 
-func mapOpenAIFinishReason(finishReason string) ai.FinishReason {
+func mapOpenAiFinishReason(finishReason string) ai.FinishReason {
 	switch finishReason {
 	case "stop":
 		return ai.FinishReasonStop
@@ -810,14 +810,14 @@ func supportsPriorityProcessing(modelID string) bool {
 		strings.HasPrefix(modelID, "o4-mini")
 }
 
-func toOpenAITools(tools []ai.Tool, toolChoice *ai.ToolChoice) (openAITools []openai.ChatCompletionToolUnionParam, openAIToolChoice *openai.ChatCompletionToolChoiceOptionUnionParam, warnings []ai.CallWarning) {
+func toOpenAiTools(tools []ai.Tool, toolChoice *ai.ToolChoice) (openAiTools []openai.ChatCompletionToolUnionParam, openAiToolChoice *openai.ChatCompletionToolChoiceOptionUnionParam, warnings []ai.CallWarning) {
 	for _, tool := range tools {
 		if tool.GetType() == ai.ToolTypeFunction {
 			ft, ok := tool.(ai.FunctionTool)
 			if !ok {
 				continue
 			}
-			openAITools = append(openAITools, openai.ChatCompletionToolUnionParam{
+			openAiTools = append(openAiTools, openai.ChatCompletionToolUnionParam{
 				OfFunction: &openai.ChatCompletionFunctionToolParam{
 					Function: shared.FunctionDefinitionParam{
 						Name:        ft.Name,
@@ -844,15 +844,15 @@ func toOpenAITools(tools []ai.Tool, toolChoice *ai.ToolChoice) (openAITools []op
 
 	switch *toolChoice {
 	case ai.ToolChoiceAuto:
-		openAIToolChoice = &openai.ChatCompletionToolChoiceOptionUnionParam{
+		openAiToolChoice = &openai.ChatCompletionToolChoiceOptionUnionParam{
 			OfAuto: param.NewOpt("auto"),
 		}
 	case ai.ToolChoiceNone:
-		openAIToolChoice = &openai.ChatCompletionToolChoiceOptionUnionParam{
+		openAiToolChoice = &openai.ChatCompletionToolChoiceOptionUnionParam{
 			OfAuto: param.NewOpt("none"),
 		}
 	default:
-		openAIToolChoice = &openai.ChatCompletionToolChoiceOptionUnionParam{
+		openAiToolChoice = &openai.ChatCompletionToolChoiceOptionUnionParam{
 			OfFunctionToolChoice: &openai.ChatCompletionNamedToolChoiceParam{
 				Type: "function",
 				Function: openai.ChatCompletionNamedToolChoiceFunctionParam{
@@ -864,7 +864,7 @@ func toOpenAITools(tools []ai.Tool, toolChoice *ai.ToolChoice) (openAITools []op
 	return
 }
 
-func toOpenAIPrompt(prompt ai.Prompt) ([]openai.ChatCompletionMessageParamUnion, []ai.CallWarning) {
+func toOpenAiPrompt(prompt ai.Prompt) ([]openai.ChatCompletionMessageParamUnion, []ai.CallWarning) {
 	var messages []openai.ChatCompletionMessageParamUnion
 	var warnings []ai.CallWarning
 	for _, msg := range prompt {

internal/ai/providers/openai_test.go 🔗

@@ -15,7 +15,7 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
-func TestToOpenAIPrompt_SystemMessages(t *testing.T) {
+func TestToOpenAiPrompt_SystemMessages(t *testing.T) {
 	t.Parallel()
 
 	t.Run("should forward system messages", func(t *testing.T) {
@@ -30,7 +30,7 @@ func TestToOpenAIPrompt_SystemMessages(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -50,7 +50,7 @@ func TestToOpenAIPrompt_SystemMessages(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Len(t, warnings, 1)
 		require.Contains(t, warnings[0].Message, "system prompt has no text parts")
@@ -70,7 +70,7 @@ func TestToOpenAIPrompt_SystemMessages(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -81,7 +81,7 @@ func TestToOpenAIPrompt_SystemMessages(t *testing.T) {
 	})
 }
 
-func TestToOpenAIPrompt_UserMessages(t *testing.T) {
+func TestToOpenAiPrompt_UserMessages(t *testing.T) {
 	t.Parallel()
 
 	t.Run("should convert messages with only a text part to a string content", func(t *testing.T) {
@@ -96,7 +96,7 @@ func TestToOpenAIPrompt_UserMessages(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -123,7 +123,7 @@ func TestToOpenAIPrompt_UserMessages(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -167,7 +167,7 @@ func TestToOpenAIPrompt_UserMessages(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -184,7 +184,7 @@ func TestToOpenAIPrompt_UserMessages(t *testing.T) {
 	})
 }
 
-func TestToOpenAIPrompt_FileParts(t *testing.T) {
+func TestToOpenAiPrompt_FileParts(t *testing.T) {
 	t.Parallel()
 
 	t.Run("should throw for unsupported mime types", func(t *testing.T) {
@@ -202,7 +202,7 @@ func TestToOpenAIPrompt_FileParts(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Len(t, warnings, 1)
 		require.Contains(t, warnings[0].Message, "file part media type application/something not supported")
@@ -225,7 +225,7 @@ func TestToOpenAIPrompt_FileParts(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -258,7 +258,7 @@ func TestToOpenAIPrompt_FileParts(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -286,7 +286,7 @@ func TestToOpenAIPrompt_FileParts(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -315,7 +315,7 @@ func TestToOpenAIPrompt_FileParts(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -349,7 +349,7 @@ func TestToOpenAIPrompt_FileParts(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -378,7 +378,7 @@ func TestToOpenAIPrompt_FileParts(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -408,7 +408,7 @@ func TestToOpenAIPrompt_FileParts(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -421,7 +421,7 @@ func TestToOpenAIPrompt_FileParts(t *testing.T) {
 	})
 }
 
-func TestToOpenAIPrompt_ToolCalls(t *testing.T) {
+func TestToOpenAiPrompt_ToolCalls(t *testing.T) {
 	t.Parallel()
 
 	t.Run("should stringify arguments to tool calls", func(t *testing.T) {
@@ -457,7 +457,7 @@ func TestToOpenAIPrompt_ToolCalls(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 2)
@@ -504,7 +504,7 @@ func TestToOpenAIPrompt_ToolCalls(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 2)
@@ -523,7 +523,7 @@ func TestToOpenAIPrompt_ToolCalls(t *testing.T) {
 	})
 }
 
-func TestToOpenAIPrompt_AssistantMessages(t *testing.T) {
+func TestToOpenAiPrompt_AssistantMessages(t *testing.T) {
 	t.Parallel()
 
 	t.Run("should handle simple text assistant messages", func(t *testing.T) {
@@ -538,7 +538,7 @@ func TestToOpenAIPrompt_AssistantMessages(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -568,7 +568,7 @@ func TestToOpenAIPrompt_AssistantMessages(t *testing.T) {
 			},
 		}
 
-		messages, warnings := toOpenAIPrompt(prompt)
+		messages, warnings := toOpenAiPrompt(prompt)
 
 		require.Empty(t, warnings)
 		require.Len(t, messages, 1)
@@ -811,9 +811,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "Hello, World!",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -843,9 +843,9 @@ func TestDoGenerate(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -867,9 +867,9 @@ func TestDoGenerate(t *testing.T) {
 
 		server.prepareJSONResponse(map[string]any{})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -907,9 +907,9 @@ func TestDoGenerate(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -933,9 +933,9 @@ func TestDoGenerate(t *testing.T) {
 			"logprobs": testLogprobs,
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -969,9 +969,9 @@ func TestDoGenerate(t *testing.T) {
 			"finish_reason": "stop",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -993,9 +993,9 @@ func TestDoGenerate(t *testing.T) {
 			"finish_reason": "eos",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1017,9 +1017,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1049,9 +1049,9 @@ func TestDoGenerate(t *testing.T) {
 
 		server.prepareJSONResponse(map[string]any{})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1093,9 +1093,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("o1-mini")
 
@@ -1133,9 +1133,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-4o")
 
@@ -1173,9 +1173,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1245,9 +1245,9 @@ func TestDoGenerate(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1303,9 +1303,9 @@ func TestDoGenerate(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1345,9 +1345,9 @@ func TestDoGenerate(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-4o-mini")
 
@@ -1380,9 +1380,9 @@ func TestDoGenerate(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-4o-mini")
 
@@ -1407,9 +1407,9 @@ func TestDoGenerate(t *testing.T) {
 
 		server.prepareJSONResponse(map[string]any{})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("o1-preview")
 
@@ -1455,9 +1455,9 @@ func TestDoGenerate(t *testing.T) {
 
 		server.prepareJSONResponse(map[string]any{})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("o1-preview")
 
@@ -1499,9 +1499,9 @@ func TestDoGenerate(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("o1-preview")
 
@@ -1526,9 +1526,9 @@ func TestDoGenerate(t *testing.T) {
 			"model": "o1-preview",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("o1-preview")
 
@@ -1566,9 +1566,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1612,9 +1612,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1652,9 +1652,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1696,9 +1696,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1736,9 +1736,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -1774,9 +1774,9 @@ func TestDoGenerate(t *testing.T) {
 
 		server.prepareJSONResponse(map[string]any{})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-4o-search-preview")
 
@@ -1808,9 +1808,9 @@ func TestDoGenerate(t *testing.T) {
 			"content": "",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("o3-mini")
 
@@ -1846,9 +1846,9 @@ func TestDoGenerate(t *testing.T) {
 
 		server.prepareJSONResponse(map[string]any{})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-4o-mini")
 
@@ -1881,9 +1881,9 @@ func TestDoGenerate(t *testing.T) {
 
 		server.prepareJSONResponse(map[string]any{})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-4o-mini")
 
@@ -1919,9 +1919,9 @@ func TestDoGenerate(t *testing.T) {
 
 		server.prepareJSONResponse(map[string]any{})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -2228,9 +2228,9 @@ func TestDoStream(t *testing.T) {
 			"logprobs": testLogprobs,
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -2284,9 +2284,9 @@ func TestDoStream(t *testing.T) {
 
 		server.prepareToolStreamResponse()
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -2370,9 +2370,9 @@ func TestDoStream(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -2409,9 +2409,9 @@ func TestDoStream(t *testing.T) {
 
 		server.prepareErrorStreamResponse()
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -2450,9 +2450,9 @@ func TestDoStream(t *testing.T) {
 			"content": []string{},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -2498,9 +2498,9 @@ func TestDoStream(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -2548,9 +2548,9 @@ func TestDoStream(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -2591,9 +2591,9 @@ func TestDoStream(t *testing.T) {
 			"content": []string{},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -2635,9 +2635,9 @@ func TestDoStream(t *testing.T) {
 			"content": []string{},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
@@ -2683,9 +2683,9 @@ func TestDoStream(t *testing.T) {
 			"content": []string{},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("o3-mini")
 
@@ -2727,9 +2727,9 @@ func TestDoStream(t *testing.T) {
 			"content": []string{},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("gpt-4o-mini")
 
@@ -2772,9 +2772,9 @@ func TestDoStream(t *testing.T) {
 			"model":   "o1-preview",
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("o1-preview")
 
@@ -2818,9 +2818,9 @@ func TestDoStream(t *testing.T) {
 			},
 		})
 
-		provider := NewOpenAIProvider(
-			WithOpenAIApiKey("test-api-key"),
-			WithOpenAIBaseURL(server.server.URL),
+		provider := NewOpenAiProvider(
+			WithOpenAiAPIKey("test-api-key"),
+			WithOpenAiBaseURL(server.server.URL),
 		)
 		model, _ := provider.LanguageModel("o1-preview")