chore: change the provider interface

Created by Kujtim Hoxha

Change summary

examples/agent/main.go                  |  6 +
examples/simple/main.go                 |  8 +
examples/stream/main.go                 |  6 +
examples/streaming-agent-simple/main.go |  6 +
examples/streaming-agent/main.go        | 18 +++--
provider.go                             |  2 +-
providers/anthropic.go                  | 21 +++---
providers/openai.go                     |  4 ++--
providers/openai_test.go                | 86 +++++++++++++-------------
9 files changed, 88 insertions(+), 69 deletions(-)

Detailed changes

examples/agent/main.go 🔗

@@ -13,7 +13,11 @@ func main() {
 	provider := providers.NewOpenAIProvider(
 		providers.WithOpenAIApiKey(os.Getenv("OPENAI_API_KEY")),
 	)
-	model := provider.LanguageModel("gpt-4o")
+	model, err := provider.LanguageModel("gpt-4o")
+	if err != nil {
+		fmt.Println(err)
+		return
+	}
 
 	// Create weather tool using the new type-safe API
 	type WeatherInput struct {

examples/simple/main.go 🔗

@@ -10,8 +10,12 @@ import (
 )
 
 func main() {
-	provider := providers.NewOpenAIProvider(providers.WithOpenAIApiKey(os.Getenv("OPENAI_API_KEY")))
-	model := provider.LanguageModel("gpt-4o")
+	provider := providers.NewAnthropicProvider(providers.WithAnthropicAPIKey(os.Getenv("ANTHROPIC_API_KEY")))
+	model, err := provider.LanguageModel("claude-sonnet-4-20250514")
+	if err != nil {
+		fmt.Println(err)
+		return
+	}
 
 	response, err := model.Generate(context.Background(), ai.Call{
 		Prompt: ai.Prompt{

examples/stream/main.go 🔗

@@ -12,7 +12,11 @@ import (
 
 func main() {
 	provider := providers.NewOpenAIProvider(providers.WithOpenAIApiKey(os.Getenv("OPENAI_API_KEY")))
-	model := provider.LanguageModel("gpt-4o")
+	model, err := provider.LanguageModel("gpt-4o")
+	if err != nil {
+		fmt.Println(err)
+		return
+	}
 
 	stream, err := model.Stream(context.Background(), ai.Call{
 		Prompt: ai.Prompt{

examples/streaming-agent-simple/main.go 🔗

@@ -21,7 +21,11 @@ func main() {
 	provider := providers.NewOpenAIProvider(
 		providers.WithOpenAIApiKey(apiKey),
 	)
-	model := provider.LanguageModel("gpt-4o-mini")
+	model, err := provider.LanguageModel("gpt-4o-mini")
+	if err != nil {
+		fmt.Println(err)
+		return
+	}
 
 	// Create echo tool using the new type-safe API
 	type EchoInput struct {

examples/streaming-agent/main.go 🔗

@@ -12,10 +12,10 @@ import (
 
 func main() {
 	// Check for API key
-	apiKey := os.Getenv("OPENAI_API_KEY")
+	apiKey := os.Getenv("ANTHROPIC_API_KEY")
 	if apiKey == "" {
-		fmt.Println("❌ Please set OPENAI_API_KEY environment variable")
-		fmt.Println("   export OPENAI_API_KEY=your_api_key_here")
+		fmt.Println("❌ Please set ANTHROPIC_API_KEY environment variable")
+		fmt.Println("   export ANTHROPIC_API_KEY=your_api_key_here")
 		os.Exit(1)
 	}
 
@@ -24,10 +24,12 @@ func main() {
 	fmt.Println()
 
 	// Create OpenAI provider and model
-	provider := providers.NewOpenAIProvider(
-		providers.WithOpenAIApiKey(apiKey),
-	)
-	model := provider.LanguageModel("gpt-4o-mini") // Using mini for faster/cheaper responses
+	provider := providers.NewAnthropicProvider(providers.WithAnthropicAPIKey(os.Getenv("ANTHROPIC_API_KEY")))
+	model, err := provider.LanguageModel("claude-sonnet-4-20250514")
+	if err != nil {
+		fmt.Println(err)
+		return
+	}
 
 	// Define input types for type-safe tools
 	type WeatherInput struct {
@@ -194,7 +196,7 @@ func main() {
 		OnSource: func(source ai.SourceContent) {
 			fmt.Printf("📚 Source: %s (%s)\n", source.Title, source.URL)
 		},
-		OnStreamFinish: func(usage ai.Usage, finishReason ai.FinishReason, providerMetadata ai.ProviderOptions) {
+		OnStreamFinish: func(usage ai.Usage, finishReason ai.FinishReason, providerMetadata ai.ProviderMetadata) {
 			fmt.Printf("📊 Stream finished (reason: %s, tokens: %d)\n", finishReason, usage.TotalTokens)
 		},
 		OnStreamError: func(err error) {

provider.go 🔗

@@ -1,6 +1,6 @@
 package ai
 
 type Provider interface {
-	LanguageModel(modelID string) LanguageModel
+	LanguageModel(modelID string) (LanguageModel, error)
 	// TODO: add other model types when needed
 }

providers/anthropic.go 🔗

@@ -16,14 +16,14 @@ import (
 	"github.com/charmbracelet/crush/internal/ai"
 )
 
-type AnthropicThinking struct {
-	BudgetTokens int64 `json:"budget_tokens"`
+type AnthropicProviderOptions struct {
+	SendReasoning          *bool                            `json:"send_reasoning,omitempty"`
+	Thinking               *AnthropicThinkingProviderOption `json:"thinking,omitempty"`
+	DisableParallelToolUse *bool                            `json:"disable_parallel_tool_use,omitempty"`
 }
 
-type AnthropicProviderOptions struct {
-	SendReasoning          *bool              `json:"send_reasoning,omitempty"`
-	Thinking               *AnthropicThinking `json:"thinking,omitempty"`
-	DisableParallelToolUse *bool              `json:"disable_parallel_tool_use,omitempty"`
+type AnthropicThinkingProviderOption struct {
+	BudgetTokens int64 `json:"budget_tokens"`
 }
 
 type AnthropicReasoningMetadata struct {
@@ -62,7 +62,7 @@ func NewAnthropicProvider(opts ...AnthropicOption) ai.Provider {
 		o(&options)
 	}
 	if options.baseURL == "" {
-		options.baseURL = "https://api.anthropic.com/v1"
+		options.baseURL = "https://api.anthropic.com"
 	}
 
 	if options.name == "" {
@@ -104,7 +104,7 @@ func WithAnthropicHTTPClient(client option.HTTPClient) AnthropicOption {
 	}
 }
 
-func (a *anthropicProvider) LanguageModel(modelID string) ai.LanguageModel {
+func (a *anthropicProvider) LanguageModel(modelID string) (ai.LanguageModel, error) {
 	anthropicClientOptions := []option.RequestOption{}
 	if a.options.apiKey != "" {
 		anthropicClientOptions = append(anthropicClientOptions, option.WithAPIKey(a.options.apiKey))
@@ -125,7 +125,7 @@ func (a *anthropicProvider) LanguageModel(modelID string) ai.LanguageModel {
 		provider:        fmt.Sprintf("%s.messages", a.options.name),
 		providerOptions: a.options,
 		client:          anthropic.NewClient(anthropicClientOptions...),
-	}
+	}, nil
 }
 
 type anthropicLanguageModel struct {
@@ -176,6 +176,7 @@ func (a anthropicLanguageModel) prepareParams(call ai.Call) (*anthropic.MessageN
 	params.System = systemBlocks
 	params.Messages = messages
 	params.Model = anthropic.Model(a.modelID)
+	params.MaxTokens = 4096
 
 	if call.MaxOutputTokens != nil {
 		params.MaxTokens = *call.MaxOutputTokens
@@ -364,7 +365,7 @@ func toAnthropicTools(tools []ai.Tool, toolChoice *ai.ToolChoice, disableParalle
 				anthropicTool.CacheControl = anthropic.NewCacheControlEphemeralParam()
 			}
 			anthropicTools = append(anthropicTools, anthropic.ToolUnionParam{OfTool: &anthropicTool})
-
+			continue
 		}
 		// TODO: handle provider tool calls
 		warnings = append(warnings, ai.CallWarning{

providers/openai.go 🔗

@@ -133,7 +133,7 @@ func WithOpenAIHttpClient(client option.HTTPClient) OpenAIOption {
 }
 
 // LanguageModel implements ai.Provider.
-func (o *openAIProvider) LanguageModel(modelID string) ai.LanguageModel {
+func (o *openAIProvider) LanguageModel(modelID string) (ai.LanguageModel, error) {
 	openaiClientOptions := []option.RequestOption{}
 	if o.options.apiKey != "" {
 		openaiClientOptions = append(openaiClientOptions, option.WithAPIKey(o.options.apiKey))
@@ -155,7 +155,7 @@ func (o *openAIProvider) LanguageModel(modelID string) ai.LanguageModel {
 		provider:        fmt.Sprintf("%s.chat", o.options.name),
 		providerOptions: o.options,
 		client:          openai.NewClient(openaiClientOptions...),
-	}
+	}, nil
 }
 
 type openAILanguageModel struct {

providers/openai_test.go 🔗

@@ -815,7 +815,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -847,7 +847,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -871,7 +871,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -911,7 +911,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -937,7 +937,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -973,7 +973,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -997,7 +997,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1021,7 +1021,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1053,7 +1053,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1097,7 +1097,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("o1-mini")
+		model, _ := provider.LanguageModel("o1-mini")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1137,7 +1137,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-4o")
+		model, _ := provider.LanguageModel("gpt-4o")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1177,7 +1177,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1249,7 +1249,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1307,7 +1307,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1349,7 +1349,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-4o-mini")
+		model, _ := provider.LanguageModel("gpt-4o-mini")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1384,7 +1384,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-4o-mini")
+		model, _ := provider.LanguageModel("gpt-4o-mini")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1411,7 +1411,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("o1-preview")
+		model, _ := provider.LanguageModel("o1-preview")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt:           testPrompt,
@@ -1459,7 +1459,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("o1-preview")
+		model, _ := provider.LanguageModel("o1-preview")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt:          testPrompt,
@@ -1503,7 +1503,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("o1-preview")
+		model, _ := provider.LanguageModel("o1-preview")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1530,7 +1530,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("o1-preview")
+		model, _ := provider.LanguageModel("o1-preview")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1570,7 +1570,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1616,7 +1616,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1656,7 +1656,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1700,7 +1700,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1740,7 +1740,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1778,7 +1778,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-4o-search-preview")
+		model, _ := provider.LanguageModel("gpt-4o-search-preview")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt:      testPrompt,
@@ -1812,7 +1812,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("o3-mini")
+		model, _ := provider.LanguageModel("o3-mini")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1850,7 +1850,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-4o-mini")
+		model, _ := provider.LanguageModel("gpt-4o-mini")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1885,7 +1885,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-4o-mini")
+		model, _ := provider.LanguageModel("gpt-4o-mini")
 
 		_, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -1923,7 +1923,7 @@ func TestDoGenerate(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		result, err := model.Generate(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2232,7 +2232,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		stream, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2288,7 +2288,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		stream, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2374,7 +2374,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		stream, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2413,7 +2413,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		stream, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2454,7 +2454,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2502,7 +2502,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		stream, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2552,7 +2552,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		stream, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2595,7 +2595,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2639,7 +2639,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-3.5-turbo")
+		model, _ := provider.LanguageModel("gpt-3.5-turbo")
 
 		_, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2687,7 +2687,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("o3-mini")
+		model, _ := provider.LanguageModel("o3-mini")
 
 		_, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2731,7 +2731,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("gpt-4o-mini")
+		model, _ := provider.LanguageModel("gpt-4o-mini")
 
 		_, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2776,7 +2776,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("o1-preview")
+		model, _ := provider.LanguageModel("o1-preview")
 
 		stream, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,
@@ -2822,7 +2822,7 @@ func TestDoStream(t *testing.T) {
 			WithOpenAIApiKey("test-api-key"),
 			WithOpenAIBaseURL(server.server.URL),
 		)
-		model := provider.LanguageModel("o1-preview")
+		model, _ := provider.LanguageModel("o1-preview")
 
 		stream, err := model.Stream(context.Background(), ai.Call{
 			Prompt: testPrompt,