feat: add vercel provider (#129)

Created by Kujtim Hoxha

Change summary

providers/vercel/language_model_hooks.go                                                            | 1052 
providers/vercel/provider_options.go                                                                |  191 
providers/vercel/vercel.go                                                                          |  114 
providertests/testdata/TestVercelCommon/claude-sonnet-4/multi_tool.yaml                             |   26 
providertests/testdata/TestVercelCommon/claude-sonnet-4/multi_tool_streaming.yaml                   |   54 
providertests/testdata/TestVercelCommon/claude-sonnet-4/simple.yaml                                 |   26 
providertests/testdata/TestVercelCommon/claude-sonnet-4/simple_streaming.yaml                       |   52 
providertests/testdata/TestVercelCommon/claude-sonnet-4/tool.yaml                                   |   26 
providertests/testdata/TestVercelCommon/claude-sonnet-4/tool_streaming.yaml                         |   44 
providertests/testdata/TestVercelCommon/gemini-2.5-flash/multi_tool.yaml                            |   26 
providertests/testdata/TestVercelCommon/gemini-2.5-flash/multi_tool_streaming.yaml                  |   36 
providertests/testdata/TestVercelCommon/gemini-2.5-flash/simple.yaml                                |   26 
providertests/testdata/TestVercelCommon/gemini-2.5-flash/simple_streaming.yaml                      |   30 
providertests/testdata/TestVercelCommon/gemini-2.5-flash/tool.yaml                                  |   26 
providertests/testdata/TestVercelCommon/gemini-2.5-flash/tool_streaming.yaml                        |   32 
providertests/testdata/TestVercelCommon/gemini-3-pro-preview/multi_tool.yaml                        |   26 
providertests/testdata/TestVercelCommon/gemini-3-pro-preview/multi_tool_streaming.yaml              |   36 
providertests/testdata/TestVercelCommon/gemini-3-pro-preview/simple.yaml                            |   26 
providertests/testdata/TestVercelCommon/gemini-3-pro-preview/simple_streaming.yaml                  |   30 
providertests/testdata/TestVercelCommon/gemini-3-pro-preview/tool.yaml                              |   26 
providertests/testdata/TestVercelCommon/gemini-3-pro-preview/tool_streaming.yaml                    |   32 
providertests/testdata/TestVercelCommon/gpt-5/multi_tool.yaml                                       |   26 
providertests/testdata/TestVercelCommon/gpt-5/multi_tool_streaming.yaml                             |   28 
providertests/testdata/TestVercelCommon/gpt-5/simple.yaml                                           |   26 
providertests/testdata/TestVercelCommon/gpt-5/simple_streaming.yaml                                 |   28 
providertests/testdata/TestVercelCommon/gpt-5/tool.yaml                                             |   26 
providertests/testdata/TestVercelCommon/gpt-5/tool_streaming.yaml                                   |   28 
providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/multi_tool.yaml           |   26 
providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/multi_tool_streaming.yaml |   62 
providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/simple.yaml               |   26 
providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/simple_streaming.yaml     |   50 
providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/tool.yaml                 |   26 
providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/tool_streaming.yaml       |   44 
providertests/testdata/TestVercelThinking/claude-sonnet-4-sig/thinking-streaming.yaml               |   74 
providertests/testdata/TestVercelThinking/claude-sonnet-4-sig/thinking.yaml                         |   26 
providertests/testdata/TestVercelThinking/claude-sonnet-4/thinking-streaming.yaml                   |   64 
providertests/testdata/TestVercelThinking/claude-sonnet-4/thinking.yaml                             |   26 
providertests/testdata/TestVercelThinking/gemini-3-pro-preview/thinking-streaming.yaml              |   36 
providertests/testdata/TestVercelThinking/gemini-3-pro-preview/thinking.yaml                        |   26 
providertests/testdata/TestVercelThinking/gpt-5/thinking-streaming.yaml                             |  212 
providertests/testdata/TestVercelThinking/gpt-5/thinking.yaml                                       |   26 
providertests/vercel_test.go                                                                        |  124 
42 files changed, 2,947 insertions(+)

Detailed changes

providers/vercel/language_model_hooks.go 🔗

@@ -0,0 +1,1052 @@
+package vercel
+
+import (
+	"encoding/base64"
+	"encoding/json"
+	"fmt"
+	"maps"
+	"strings"
+
+	"charm.land/fantasy"
+	"charm.land/fantasy/providers/anthropic"
+	"charm.land/fantasy/providers/google"
+	openaipkg "charm.land/fantasy/providers/openai"
+	openaisdk "github.com/openai/openai-go/v2"
+	"github.com/openai/openai-go/v2/packages/param"
+)
+
+const reasoningStartedCtx = "reasoning_started"
+
+type currentReasoningState struct {
+	metadata       *openaipkg.ResponsesReasoningMetadata
+	googleMetadata *google.ReasoningMetadata
+	googleText     string
+	anthropicSig   string
+}
+
+func languagePrepareModelCall(_ fantasy.LanguageModel, params *openaisdk.ChatCompletionNewParams, call fantasy.Call) ([]fantasy.CallWarning, error) {
+	providerOptions := &ProviderOptions{}
+	if v, ok := call.ProviderOptions[Name]; ok {
+		providerOptions, ok = v.(*ProviderOptions)
+		if !ok {
+			return nil, &fantasy.Error{Title: "invalid argument", Message: "vercel provider options should be *vercel.ProviderOptions"}
+		}
+	}
+
+	extraFields := make(map[string]any)
+
+	// Handle reasoning options
+	if providerOptions.Reasoning != nil {
+		data, err := structToMapJSON(providerOptions.Reasoning)
+		if err != nil {
+			return nil, err
+		}
+		extraFields["reasoning"] = data
+	}
+
+	// Handle provider options for gateway routing
+	if providerOptions.ProviderOptions != nil {
+		data, err := structToMapJSON(providerOptions.ProviderOptions)
+		if err != nil {
+			return nil, err
+		}
+		extraFields["providerOptions"] = map[string]any{
+			"gateway": data,
+		}
+	}
+
+	// Handle BYOK (Bring Your Own Key)
+	if providerOptions.BYOK != nil {
+		data, err := structToMapJSON(providerOptions.BYOK)
+		if err != nil {
+			return nil, err
+		}
+		if gatewayOpts, ok := extraFields["providerOptions"].(map[string]any); ok {
+			gatewayOpts["byok"] = data
+		} else {
+			extraFields["providerOptions"] = map[string]any{
+				"gateway": map[string]any{
+					"byok": data,
+				},
+			}
+		}
+	}
+
+	// Handle standard OpenAI options
+	if providerOptions.LogitBias != nil {
+		params.LogitBias = providerOptions.LogitBias
+	}
+	if providerOptions.LogProbs != nil {
+		params.Logprobs = param.NewOpt(*providerOptions.LogProbs)
+	}
+	if providerOptions.TopLogProbs != nil {
+		params.TopLogprobs = param.NewOpt(*providerOptions.TopLogProbs)
+	}
+	if providerOptions.User != nil {
+		params.User = param.NewOpt(*providerOptions.User)
+	}
+	if providerOptions.ParallelToolCalls != nil {
+		params.ParallelToolCalls = param.NewOpt(*providerOptions.ParallelToolCalls)
+	}
+
+	// Handle model fallbacks - direct models field
+	if providerOptions.ProviderOptions != nil && len(providerOptions.ProviderOptions.Models) > 0 {
+		extraFields["models"] = providerOptions.ProviderOptions.Models
+	}
+
+	maps.Copy(extraFields, providerOptions.ExtraBody)
+	params.SetExtraFields(extraFields)
+	return nil, nil
+}
+
+func languageModelExtraContent(choice openaisdk.ChatCompletionChoice) []fantasy.Content {
+	content := make([]fantasy.Content, 0)
+	reasoningData := ReasoningData{}
+	err := json.Unmarshal([]byte(choice.Message.RawJSON()), &reasoningData)
+	if err != nil {
+		return content
+	}
+
+	responsesReasoningBlocks := make([]openaipkg.ResponsesReasoningMetadata, 0)
+	anthropicReasoningBlocks := make([]struct {
+		text     string
+		metadata *anthropic.ReasoningOptionMetadata
+	}, 0)
+	googleReasoningBlocks := make([]struct {
+		text     string
+		metadata *google.ReasoningMetadata
+	}, 0)
+	otherReasoning := make([]string, 0)
+
+	for _, detail := range reasoningData.ReasoningDetails {
+		if strings.HasPrefix(detail.Format, "openai-responses") || strings.HasPrefix(detail.Format, "xai-responses") {
+			var thinkingBlock openaipkg.ResponsesReasoningMetadata
+			if len(responsesReasoningBlocks)-1 >= detail.Index {
+				thinkingBlock = responsesReasoningBlocks[detail.Index]
+			} else {
+				thinkingBlock = openaipkg.ResponsesReasoningMetadata{}
+				responsesReasoningBlocks = append(responsesReasoningBlocks, thinkingBlock)
+			}
+
+			switch detail.Type {
+			case "reasoning.summary":
+				thinkingBlock.Summary = append(thinkingBlock.Summary, detail.Summary)
+			case "reasoning.encrypted":
+				thinkingBlock.EncryptedContent = &detail.Data
+			}
+			if detail.ID != "" {
+				thinkingBlock.ItemID = detail.ID
+			}
+			responsesReasoningBlocks[detail.Index] = thinkingBlock
+			continue
+		}
+
+		if strings.HasPrefix(detail.Format, "google-gemini") {
+			var thinkingBlock struct {
+				text     string
+				metadata *google.ReasoningMetadata
+			}
+			if len(googleReasoningBlocks)-1 >= detail.Index {
+				thinkingBlock = googleReasoningBlocks[detail.Index]
+			} else {
+				thinkingBlock = struct {
+					text     string
+					metadata *google.ReasoningMetadata
+				}{metadata: &google.ReasoningMetadata{}}
+				googleReasoningBlocks = append(googleReasoningBlocks, thinkingBlock)
+			}
+
+			switch detail.Type {
+			case "reasoning.text":
+				thinkingBlock.text = detail.Text
+			case "reasoning.encrypted":
+				thinkingBlock.metadata.Signature = detail.Data
+				thinkingBlock.metadata.ToolID = detail.ID
+			}
+			googleReasoningBlocks[detail.Index] = thinkingBlock
+			continue
+		}
+
+		if strings.HasPrefix(detail.Format, "anthropic-claude") {
+			anthropicReasoningBlocks = append(anthropicReasoningBlocks, struct {
+				text     string
+				metadata *anthropic.ReasoningOptionMetadata
+			}{
+				text: detail.Text,
+				metadata: &anthropic.ReasoningOptionMetadata{
+					Signature: detail.Signature,
+				},
+			})
+			continue
+		}
+
+		otherReasoning = append(otherReasoning, detail.Text)
+	}
+
+	// Fallback to simple reasoning field if no details
+	if reasoningData.Reasoning != "" && len(reasoningData.ReasoningDetails) == 0 {
+		otherReasoning = append(otherReasoning, reasoningData.Reasoning)
+	}
+
+	for _, block := range responsesReasoningBlocks {
+		if len(block.Summary) == 0 {
+			block.Summary = []string{""}
+		}
+		content = append(content, fantasy.ReasoningContent{
+			Text: strings.Join(block.Summary, "\n"),
+			ProviderMetadata: fantasy.ProviderMetadata{
+				openaipkg.Name: &block,
+			},
+		})
+	}
+
+	for _, block := range anthropicReasoningBlocks {
+		content = append(content, fantasy.ReasoningContent{
+			Text: block.text,
+			ProviderMetadata: fantasy.ProviderMetadata{
+				anthropic.Name: block.metadata,
+			},
+		})
+	}
+
+	for _, block := range googleReasoningBlocks {
+		content = append(content, fantasy.ReasoningContent{
+			Text: block.text,
+			ProviderMetadata: fantasy.ProviderMetadata{
+				google.Name: block.metadata,
+			},
+		})
+	}
+
+	for _, reasoning := range otherReasoning {
+		if reasoning != "" {
+			content = append(content, fantasy.ReasoningContent{
+				Text: reasoning,
+			})
+		}
+	}
+
+	return content
+}
+
+func extractReasoningContext(ctx map[string]any) *currentReasoningState {
+	reasoningStarted, ok := ctx[reasoningStartedCtx]
+	if !ok {
+		return nil
+	}
+	state, ok := reasoningStarted.(*currentReasoningState)
+	if !ok {
+		return nil
+	}
+	return state
+}
+
+func languageModelStreamExtra(chunk openaisdk.ChatCompletionChunk, yield func(fantasy.StreamPart) bool, ctx map[string]any) (map[string]any, bool) {
+	if len(chunk.Choices) == 0 {
+		return ctx, true
+	}
+
+	currentState := extractReasoningContext(ctx)
+
+	inx := 0
+	choice := chunk.Choices[inx]
+	reasoningData := ReasoningData{}
+	err := json.Unmarshal([]byte(choice.Delta.RawJSON()), &reasoningData)
+	if err != nil {
+		yield(fantasy.StreamPart{
+			Type:  fantasy.StreamPartTypeError,
+			Error: &fantasy.Error{Title: "stream error", Message: "error unmarshalling delta", Cause: err},
+		})
+		return ctx, false
+	}
+
+	// Reasoning Start
+	if currentState == nil {
+		if len(reasoningData.ReasoningDetails) == 0 && reasoningData.Reasoning == "" {
+			return ctx, true
+		}
+
+		var metadata fantasy.ProviderMetadata
+		currentState = &currentReasoningState{}
+
+		if len(reasoningData.ReasoningDetails) > 0 {
+			detail := reasoningData.ReasoningDetails[0]
+
+			if strings.HasPrefix(detail.Format, "openai-responses") || strings.HasPrefix(detail.Format, "xai-responses") {
+				currentState.metadata = &openaipkg.ResponsesReasoningMetadata{
+					Summary: []string{detail.Summary},
+				}
+				metadata = fantasy.ProviderMetadata{
+					openaipkg.Name: currentState.metadata,
+				}
+				if detail.Data != "" {
+					shouldContinue := yield(fantasy.StreamPart{
+						Type:             fantasy.StreamPartTypeReasoningStart,
+						ID:               fmt.Sprintf("%d", inx),
+						Delta:            detail.Summary,
+						ProviderMetadata: metadata,
+					})
+					if !shouldContinue {
+						return ctx, false
+					}
+					return ctx, yield(fantasy.StreamPart{
+						Type: fantasy.StreamPartTypeReasoningEnd,
+						ID:   fmt.Sprintf("%d", inx),
+						ProviderMetadata: fantasy.ProviderMetadata{
+							openaipkg.Name: &openaipkg.ResponsesReasoningMetadata{
+								Summary:          []string{detail.Summary},
+								EncryptedContent: &detail.Data,
+								ItemID:           detail.ID,
+							},
+						},
+					})
+				}
+			}
+
+			if strings.HasPrefix(detail.Format, "google-gemini") {
+				if detail.Type == "reasoning.encrypted" {
+					ctx[reasoningStartedCtx] = nil
+					if !yield(fantasy.StreamPart{
+						Type: fantasy.StreamPartTypeReasoningStart,
+						ID:   fmt.Sprintf("%d", inx),
+					}) {
+						return ctx, false
+					}
+					return ctx, yield(fantasy.StreamPart{
+						Type: fantasy.StreamPartTypeReasoningEnd,
+						ID:   fmt.Sprintf("%d", inx),
+						ProviderMetadata: fantasy.ProviderMetadata{
+							google.Name: &google.ReasoningMetadata{
+								Signature: detail.Data,
+								ToolID:    detail.ID,
+							},
+						},
+					})
+				}
+				currentState.googleMetadata = &google.ReasoningMetadata{}
+				currentState.googleText = detail.Text
+				metadata = fantasy.ProviderMetadata{
+					google.Name: currentState.googleMetadata,
+				}
+			}
+
+			if strings.HasPrefix(detail.Format, "anthropic-claude") {
+				currentState.anthropicSig = detail.Signature
+			}
+		}
+
+		ctx[reasoningStartedCtx] = currentState
+		delta := reasoningData.Reasoning
+		if len(reasoningData.ReasoningDetails) > 0 {
+			delta = reasoningData.ReasoningDetails[0].Summary
+			if strings.HasPrefix(reasoningData.ReasoningDetails[0].Format, "google-gemini") {
+				delta = reasoningData.ReasoningDetails[0].Text
+			}
+			if strings.HasPrefix(reasoningData.ReasoningDetails[0].Format, "anthropic-claude") {
+				delta = reasoningData.ReasoningDetails[0].Text
+			}
+		}
+		return ctx, yield(fantasy.StreamPart{
+			Type:             fantasy.StreamPartTypeReasoningStart,
+			ID:               fmt.Sprintf("%d", inx),
+			Delta:            delta,
+			ProviderMetadata: metadata,
+		})
+	}
+
+	if len(reasoningData.ReasoningDetails) == 0 && reasoningData.Reasoning == "" {
+		if choice.Delta.Content != "" || len(choice.Delta.ToolCalls) > 0 {
+			ctx[reasoningStartedCtx] = nil
+			return ctx, yield(fantasy.StreamPart{
+				Type: fantasy.StreamPartTypeReasoningEnd,
+				ID:   fmt.Sprintf("%d", inx),
+			})
+		}
+		return ctx, true
+	}
+
+	if len(reasoningData.ReasoningDetails) > 0 {
+		detail := reasoningData.ReasoningDetails[0]
+
+		if strings.HasPrefix(detail.Format, "openai-responses") || strings.HasPrefix(detail.Format, "xai-responses") {
+			if detail.Data != "" {
+				currentState.metadata.EncryptedContent = &detail.Data
+				currentState.metadata.ItemID = detail.ID
+				ctx[reasoningStartedCtx] = nil
+				return ctx, yield(fantasy.StreamPart{
+					Type: fantasy.StreamPartTypeReasoningEnd,
+					ID:   fmt.Sprintf("%d", inx),
+					ProviderMetadata: fantasy.ProviderMetadata{
+						openaipkg.Name: currentState.metadata,
+					},
+				})
+			}
+			var textDelta string
+			if len(currentState.metadata.Summary)-1 >= detail.Index {
+				currentState.metadata.Summary[detail.Index] += detail.Summary
+				textDelta = detail.Summary
+			} else {
+				currentState.metadata.Summary = append(currentState.metadata.Summary, detail.Summary)
+				textDelta = "\n" + detail.Summary
+			}
+			ctx[reasoningStartedCtx] = currentState
+			return ctx, yield(fantasy.StreamPart{
+				Type:  fantasy.StreamPartTypeReasoningDelta,
+				ID:    fmt.Sprintf("%d", inx),
+				Delta: textDelta,
+				ProviderMetadata: fantasy.ProviderMetadata{
+					openaipkg.Name: currentState.metadata,
+				},
+			})
+		}
+
+		if strings.HasPrefix(detail.Format, "anthropic-claude") {
+			if detail.Signature != "" {
+				metadata := fantasy.ProviderMetadata{
+					anthropic.Name: &anthropic.ReasoningOptionMetadata{
+						Signature: detail.Signature,
+					},
+				}
+				shouldContinue := yield(fantasy.StreamPart{
+					Type:             fantasy.StreamPartTypeReasoningDelta,
+					ID:               fmt.Sprintf("%d", inx),
+					Delta:            detail.Text,
+					ProviderMetadata: metadata,
+				})
+				if !shouldContinue {
+					return ctx, false
+				}
+				ctx[reasoningStartedCtx] = nil
+				return ctx, yield(fantasy.StreamPart{
+					Type: fantasy.StreamPartTypeReasoningEnd,
+					ID:   fmt.Sprintf("%d", inx),
+				})
+			}
+			return ctx, yield(fantasy.StreamPart{
+				Type:  fantasy.StreamPartTypeReasoningDelta,
+				ID:    fmt.Sprintf("%d", inx),
+				Delta: detail.Text,
+			})
+		}
+
+		if strings.HasPrefix(detail.Format, "google-gemini") {
+			if detail.Type == "reasoning.text" {
+				currentState.googleText += detail.Text
+				ctx[reasoningStartedCtx] = currentState
+				return ctx, yield(fantasy.StreamPart{
+					Type:  fantasy.StreamPartTypeReasoningDelta,
+					ID:    fmt.Sprintf("%d", inx),
+					Delta: detail.Text,
+				})
+			}
+			if detail.Type == "reasoning.encrypted" {
+				currentState.googleMetadata.Signature = detail.Data
+				currentState.googleMetadata.ToolID = detail.ID
+				metadata := fantasy.ProviderMetadata{
+					google.Name: currentState.googleMetadata,
+				}
+				ctx[reasoningStartedCtx] = nil
+				return ctx, yield(fantasy.StreamPart{
+					Type:             fantasy.StreamPartTypeReasoningEnd,
+					ID:               fmt.Sprintf("%d", inx),
+					ProviderMetadata: metadata,
+				})
+			}
+		}
+
+		return ctx, yield(fantasy.StreamPart{
+			Type:  fantasy.StreamPartTypeReasoningDelta,
+			ID:    fmt.Sprintf("%d", inx),
+			Delta: detail.Text,
+		})
+	}
+
+	if reasoningData.Reasoning != "" {
+		return ctx, yield(fantasy.StreamPart{
+			Type:  fantasy.StreamPartTypeReasoningDelta,
+			ID:    fmt.Sprintf("%d", inx),
+			Delta: reasoningData.Reasoning,
+		})
+	}
+
+	return ctx, true
+}
+
+func languageModelUsage(response openaisdk.ChatCompletion) (fantasy.Usage, fantasy.ProviderOptionsData) {
+	if len(response.Choices) == 0 {
+		return fantasy.Usage{}, nil
+	}
+
+	usage := response.Usage
+	completionTokenDetails := usage.CompletionTokensDetails
+	promptTokenDetails := usage.PromptTokensDetails
+
+	var provider string
+	if p, ok := response.JSON.ExtraFields["provider"]; ok {
+		provider = p.Raw()
+	}
+
+	providerMetadata := &ProviderMetadata{
+		Provider: provider,
+	}
+
+	return fantasy.Usage{
+		InputTokens:     usage.PromptTokens,
+		OutputTokens:    usage.CompletionTokens,
+		TotalTokens:     usage.TotalTokens,
+		ReasoningTokens: completionTokenDetails.ReasoningTokens,
+		CacheReadTokens: promptTokenDetails.CachedTokens,
+	}, providerMetadata
+}
+
+func languageModelStreamUsage(chunk openaisdk.ChatCompletionChunk, _ map[string]any, metadata fantasy.ProviderMetadata) (fantasy.Usage, fantasy.ProviderMetadata) {
+	usage := chunk.Usage
+	if usage.TotalTokens == 0 {
+		return fantasy.Usage{}, nil
+	}
+
+	streamProviderMetadata := &ProviderMetadata{}
+	if metadata != nil {
+		if providerMetadata, ok := metadata[Name]; ok {
+			converted, ok := providerMetadata.(*ProviderMetadata)
+			if ok {
+				streamProviderMetadata = converted
+			}
+		}
+	}
+
+	if p, ok := chunk.JSON.ExtraFields["provider"]; ok {
+		streamProviderMetadata.Provider = p.Raw()
+	}
+
+	completionTokenDetails := usage.CompletionTokensDetails
+	promptTokenDetails := usage.PromptTokensDetails
+	aiUsage := fantasy.Usage{
+		InputTokens:     usage.PromptTokens,
+		OutputTokens:    usage.CompletionTokens,
+		TotalTokens:     usage.TotalTokens,
+		ReasoningTokens: completionTokenDetails.ReasoningTokens,
+		CacheReadTokens: promptTokenDetails.CachedTokens,
+	}
+
+	return aiUsage, fantasy.ProviderMetadata{
+		Name: streamProviderMetadata,
+	}
+}
+
+func languageModelToPrompt(prompt fantasy.Prompt, _, model string) ([]openaisdk.ChatCompletionMessageParamUnion, []fantasy.CallWarning) {
+	var messages []openaisdk.ChatCompletionMessageParamUnion
+	var warnings []fantasy.CallWarning
+
+	for _, msg := range prompt {
+		switch msg.Role {
+		case fantasy.MessageRoleSystem:
+			var systemPromptParts []string
+			for _, c := range msg.Content {
+				if c.GetType() != fantasy.ContentTypeText {
+					warnings = append(warnings, fantasy.CallWarning{
+						Type:    fantasy.CallWarningTypeOther,
+						Message: "system prompt can only have text content",
+					})
+					continue
+				}
+				textPart, ok := fantasy.AsContentType[fantasy.TextPart](c)
+				if !ok {
+					warnings = append(warnings, fantasy.CallWarning{
+						Type:    fantasy.CallWarningTypeOther,
+						Message: "system prompt text part does not have the right type",
+					})
+					continue
+				}
+				text := textPart.Text
+				if strings.TrimSpace(text) != "" {
+					systemPromptParts = append(systemPromptParts, textPart.Text)
+				}
+			}
+			if len(systemPromptParts) == 0 {
+				warnings = append(warnings, fantasy.CallWarning{
+					Type:    fantasy.CallWarningTypeOther,
+					Message: "system prompt has no text parts",
+				})
+				continue
+			}
+			systemMsg := openaisdk.SystemMessage(strings.Join(systemPromptParts, "\n"))
+			anthropicCache := anthropic.GetCacheControl(msg.ProviderOptions)
+			if anthropicCache != nil {
+				systemMsg.OfSystem.SetExtraFields(map[string]any{
+					"cache_control": map[string]string{
+						"type": anthropicCache.Type,
+					},
+				})
+			}
+			messages = append(messages, systemMsg)
+
+		case fantasy.MessageRoleUser:
+			if len(msg.Content) == 1 && msg.Content[0].GetType() == fantasy.ContentTypeText {
+				textPart, ok := fantasy.AsContentType[fantasy.TextPart](msg.Content[0])
+				if !ok {
+					warnings = append(warnings, fantasy.CallWarning{
+						Type:    fantasy.CallWarningTypeOther,
+						Message: "user message text part does not have the right type",
+					})
+					continue
+				}
+				userMsg := openaisdk.UserMessage(textPart.Text)
+				anthropicCache := anthropic.GetCacheControl(msg.ProviderOptions)
+				if anthropicCache != nil {
+					userMsg.OfUser.SetExtraFields(map[string]any{
+						"cache_control": map[string]string{
+							"type": anthropicCache.Type,
+						},
+					})
+				}
+				messages = append(messages, userMsg)
+				continue
+			}
+
+			var content []openaisdk.ChatCompletionContentPartUnionParam
+			for i, c := range msg.Content {
+				isLastPart := i == len(msg.Content)-1
+				cacheControl := anthropic.GetCacheControl(c.Options())
+				if cacheControl == nil && isLastPart {
+					cacheControl = anthropic.GetCacheControl(msg.ProviderOptions)
+				}
+				switch c.GetType() {
+				case fantasy.ContentTypeText:
+					textPart, ok := fantasy.AsContentType[fantasy.TextPart](c)
+					if !ok {
+						warnings = append(warnings, fantasy.CallWarning{
+							Type:    fantasy.CallWarningTypeOther,
+							Message: "user message text part does not have the right type",
+						})
+						continue
+					}
+					part := openaisdk.ChatCompletionContentPartUnionParam{
+						OfText: &openaisdk.ChatCompletionContentPartTextParam{
+							Text: textPart.Text,
+						},
+					}
+					if cacheControl != nil {
+						part.OfText.SetExtraFields(map[string]any{
+							"cache_control": map[string]string{
+								"type": cacheControl.Type,
+							},
+						})
+					}
+					content = append(content, part)
+				case fantasy.ContentTypeFile:
+					filePart, ok := fantasy.AsContentType[fantasy.FilePart](c)
+					if !ok {
+						warnings = append(warnings, fantasy.CallWarning{
+							Type:    fantasy.CallWarningTypeOther,
+							Message: "user message file part does not have the right type",
+						})
+						continue
+					}
+					switch {
+					case strings.HasPrefix(filePart.MediaType, "image/"):
+						base64Encoded := base64.StdEncoding.EncodeToString(filePart.Data)
+						data := "data:" + filePart.MediaType + ";base64," + base64Encoded
+						imageURL := openaisdk.ChatCompletionContentPartImageImageURLParam{URL: data}
+						if providerOptions, ok := filePart.ProviderOptions[openaipkg.Name]; ok {
+							if detail, ok := providerOptions.(*openaipkg.ProviderFileOptions); ok {
+								imageURL.Detail = detail.ImageDetail
+							}
+						}
+						imageBlock := openaisdk.ChatCompletionContentPartImageParam{ImageURL: imageURL}
+						if cacheControl != nil {
+							imageBlock.SetExtraFields(map[string]any{
+								"cache_control": map[string]string{
+									"type": cacheControl.Type,
+								},
+							})
+						}
+						content = append(content, openaisdk.ChatCompletionContentPartUnionParam{OfImageURL: &imageBlock})
+
+					case filePart.MediaType == "audio/wav":
+						base64Encoded := base64.StdEncoding.EncodeToString(filePart.Data)
+						audioBlock := openaisdk.ChatCompletionContentPartInputAudioParam{
+							InputAudio: openaisdk.ChatCompletionContentPartInputAudioInputAudioParam{
+								Data:   base64Encoded,
+								Format: "wav",
+							},
+						}
+						if cacheControl != nil {
+							audioBlock.SetExtraFields(map[string]any{
+								"cache_control": map[string]string{
+									"type": cacheControl.Type,
+								},
+							})
+						}
+						content = append(content, openaisdk.ChatCompletionContentPartUnionParam{OfInputAudio: &audioBlock})
+
+					case filePart.MediaType == "audio/mpeg" || filePart.MediaType == "audio/mp3":
+						base64Encoded := base64.StdEncoding.EncodeToString(filePart.Data)
+						audioBlock := openaisdk.ChatCompletionContentPartInputAudioParam{
+							InputAudio: openaisdk.ChatCompletionContentPartInputAudioInputAudioParam{
+								Data:   base64Encoded,
+								Format: "mp3",
+							},
+						}
+						if cacheControl != nil {
+							audioBlock.SetExtraFields(map[string]any{
+								"cache_control": map[string]string{
+									"type": cacheControl.Type,
+								},
+							})
+						}
+						content = append(content, openaisdk.ChatCompletionContentPartUnionParam{OfInputAudio: &audioBlock})
+
+					case filePart.MediaType == "application/pdf":
+						dataStr := string(filePart.Data)
+						if strings.HasPrefix(dataStr, "file-") {
+							fileBlock := openaisdk.ChatCompletionContentPartFileParam{
+								File: openaisdk.ChatCompletionContentPartFileFileParam{
+									FileID: param.NewOpt(dataStr),
+								},
+							}
+							if cacheControl != nil {
+								fileBlock.SetExtraFields(map[string]any{
+									"cache_control": map[string]string{
+										"type": cacheControl.Type,
+									},
+								})
+							}
+							content = append(content, openaisdk.ChatCompletionContentPartUnionParam{OfFile: &fileBlock})
+						} else {
+							base64Encoded := base64.StdEncoding.EncodeToString(filePart.Data)
+							data := "data:application/pdf;base64," + base64Encoded
+							filename := filePart.Filename
+							if filename == "" {
+								filename = fmt.Sprintf("part-%d.pdf", len(content))
+							}
+							fileBlock := openaisdk.ChatCompletionContentPartFileParam{
+								File: openaisdk.ChatCompletionContentPartFileFileParam{
+									Filename: param.NewOpt(filename),
+									FileData: param.NewOpt(data),
+								},
+							}
+							if cacheControl != nil {
+								fileBlock.SetExtraFields(map[string]any{
+									"cache_control": map[string]string{
+										"type": cacheControl.Type,
+									},
+								})
+							}
+							content = append(content, openaisdk.ChatCompletionContentPartUnionParam{OfFile: &fileBlock})
+						}
+
+					default:
+						warnings = append(warnings, fantasy.CallWarning{
+							Type:    fantasy.CallWarningTypeOther,
+							Message: fmt.Sprintf("file part media type %s not supported", filePart.MediaType),
+						})
+					}
+				}
+			}
+			if !hasVisibleUserContent(content) {
+				warnings = append(warnings, fantasy.CallWarning{
+					Type:    fantasy.CallWarningTypeOther,
+					Message: "dropping empty user message (contains neither user-facing content nor tool results)",
+				})
+				continue
+			}
+			messages = append(messages, openaisdk.UserMessage(content))
+
+		case fantasy.MessageRoleAssistant:
+			if len(msg.Content) == 1 && msg.Content[0].GetType() == fantasy.ContentTypeText {
+				textPart, ok := fantasy.AsContentType[fantasy.TextPart](msg.Content[0])
+				if !ok {
+					warnings = append(warnings, fantasy.CallWarning{
+						Type:    fantasy.CallWarningTypeOther,
+						Message: "assistant message text part does not have the right type",
+					})
+					continue
+				}
+				assistantMsg := openaisdk.AssistantMessage(textPart.Text)
+				anthropicCache := anthropic.GetCacheControl(msg.ProviderOptions)
+				if anthropicCache != nil {
+					assistantMsg.OfAssistant.SetExtraFields(map[string]any{
+						"cache_control": map[string]string{
+							"type": anthropicCache.Type,
+						},
+					})
+				}
+				messages = append(messages, assistantMsg)
+				continue
+			}
+
+			assistantMsg := openaisdk.ChatCompletionAssistantMessageParam{
+				Role: "assistant",
+			}
+			for i, c := range msg.Content {
+				isLastPart := i == len(msg.Content)-1
+				cacheControl := anthropic.GetCacheControl(c.Options())
+				if cacheControl == nil && isLastPart {
+					cacheControl = anthropic.GetCacheControl(msg.ProviderOptions)
+				}
+				switch c.GetType() {
+				case fantasy.ContentTypeText:
+					textPart, ok := fantasy.AsContentType[fantasy.TextPart](c)
+					if !ok {
+						warnings = append(warnings, fantasy.CallWarning{
+							Type:    fantasy.CallWarningTypeOther,
+							Message: "assistant message text part does not have the right type",
+						})
+						continue
+					}
+					if assistantMsg.Content.OfString.Valid() {
+						textPart.Text = assistantMsg.Content.OfString.Value + "\n" + textPart.Text
+					}
+					assistantMsg.Content = openaisdk.ChatCompletionAssistantMessageParamContentUnion{
+						OfString: param.NewOpt(textPart.Text),
+					}
+					if cacheControl != nil {
+						assistantMsg.Content.SetExtraFields(map[string]any{
+							"cache_control": map[string]string{
+								"type": cacheControl.Type,
+							},
+						})
+					}
+				case fantasy.ContentTypeReasoning:
+					reasoningPart, ok := fantasy.AsContentType[fantasy.ReasoningPart](c)
+					if !ok {
+						warnings = append(warnings, fantasy.CallWarning{
+							Type:    fantasy.CallWarningTypeOther,
+							Message: "assistant message reasoning part does not have the right type",
+						})
+						continue
+					}
+					var reasoningDetails []ReasoningDetail
+					switch {
+					case strings.HasPrefix(model, "anthropic/") && reasoningPart.Text != "":
+						metadata := anthropic.GetReasoningMetadata(reasoningPart.Options())
+						if metadata == nil {
+							text := fmt.Sprintf("<thoughts>%s</thoughts>", reasoningPart.Text)
+							if assistantMsg.Content.OfString.Valid() {
+								text = assistantMsg.Content.OfString.Value + "\n" + text
+							}
+							assistantMsg.Content = openaisdk.ChatCompletionAssistantMessageParamContentUnion{
+								OfString: param.NewOpt(text),
+							}
+							if cacheControl != nil {
+								assistantMsg.Content.SetExtraFields(map[string]any{
+									"cache_control": map[string]string{
+										"type": cacheControl.Type,
+									},
+								})
+							}
+							continue
+						}
+						reasoningDetails = append(reasoningDetails, ReasoningDetail{
+							Format:    "anthropic-claude-v1",
+							Type:      "reasoning.text",
+							Text:      reasoningPart.Text,
+							Signature: metadata.Signature,
+						})
+						data, _ := json.Marshal(reasoningDetails)
+						reasoningDetailsMap := []map[string]any{}
+						_ = json.Unmarshal(data, &reasoningDetailsMap)
+						assistantMsg.SetExtraFields(map[string]any{
+							"reasoning_details": reasoningDetailsMap,
+							"reasoning":         reasoningPart.Text,
+						})
+					case strings.HasPrefix(model, "openai/"):
+						metadata := openaipkg.GetReasoningMetadata(reasoningPart.Options())
+						if metadata == nil {
+							text := fmt.Sprintf("<thoughts>%s</thoughts>", reasoningPart.Text)
+							if assistantMsg.Content.OfString.Valid() {
+								text = assistantMsg.Content.OfString.Value + "\n" + text
+							}
+							assistantMsg.Content = openaisdk.ChatCompletionAssistantMessageParamContentUnion{
+								OfString: param.NewOpt(text),
+							}
+							continue
+						}
+						for inx, summary := range metadata.Summary {
+							if summary == "" {
+								continue
+							}
+							reasoningDetails = append(reasoningDetails, ReasoningDetail{
+								Type:    "reasoning.summary",
+								Format:  "openai-responses-v1",
+								Summary: summary,
+								Index:   inx,
+							})
+						}
+						reasoningDetails = append(reasoningDetails, ReasoningDetail{
+							Type:   "reasoning.encrypted",
+							Format: "openai-responses-v1",
+							Data:   *metadata.EncryptedContent,
+							ID:     metadata.ItemID,
+						})
+						data, _ := json.Marshal(reasoningDetails)
+						reasoningDetailsMap := []map[string]any{}
+						_ = json.Unmarshal(data, &reasoningDetailsMap)
+						assistantMsg.SetExtraFields(map[string]any{
+							"reasoning_details": reasoningDetailsMap,
+						})
+					case strings.HasPrefix(model, "google/"):
+						metadata := google.GetReasoningMetadata(reasoningPart.Options())
+						if metadata == nil {
+							text := fmt.Sprintf("<thoughts>%s</thoughts>", reasoningPart.Text)
+							if assistantMsg.Content.OfString.Valid() {
+								text = assistantMsg.Content.OfString.Value + "\n" + text
+							}
+							assistantMsg.Content = openaisdk.ChatCompletionAssistantMessageParamContentUnion{
+								OfString: param.NewOpt(text),
+							}
+							continue
+						}
+						if reasoningPart.Text != "" {
+							reasoningDetails = append(reasoningDetails, ReasoningDetail{
+								Type:   "reasoning.text",
+								Format: "google-gemini-v1",
+								Text:   reasoningPart.Text,
+							})
+						}
+						reasoningDetails = append(reasoningDetails, ReasoningDetail{
+							Type:   "reasoning.encrypted",
+							Format: "google-gemini-v1",
+							Data:   metadata.Signature,
+							ID:     metadata.ToolID,
+						})
+						data, _ := json.Marshal(reasoningDetails)
+						reasoningDetailsMap := []map[string]any{}
+						_ = json.Unmarshal(data, &reasoningDetailsMap)
+						assistantMsg.SetExtraFields(map[string]any{
+							"reasoning_details": reasoningDetailsMap,
+						})
+					default:
+						reasoningDetails = append(reasoningDetails, ReasoningDetail{
+							Type:   "reasoning.text",
+							Text:   reasoningPart.Text,
+							Format: "unknown",
+						})
+						data, _ := json.Marshal(reasoningDetails)
+						reasoningDetailsMap := []map[string]any{}
+						_ = json.Unmarshal(data, &reasoningDetailsMap)
+						assistantMsg.SetExtraFields(map[string]any{
+							"reasoning_details": reasoningDetailsMap,
+						})
+					}
+				case fantasy.ContentTypeToolCall:
+					toolCallPart, ok := fantasy.AsContentType[fantasy.ToolCallPart](c)
+					if !ok {
+						warnings = append(warnings, fantasy.CallWarning{
+							Type:    fantasy.CallWarningTypeOther,
+							Message: "assistant message tool part does not have the right type",
+						})
+						continue
+					}
+					tc := openaisdk.ChatCompletionMessageToolCallUnionParam{
+						OfFunction: &openaisdk.ChatCompletionMessageFunctionToolCallParam{
+							ID:   toolCallPart.ToolCallID,
+							Type: "function",
+							Function: openaisdk.ChatCompletionMessageFunctionToolCallFunctionParam{
+								Name:      toolCallPart.ToolName,
+								Arguments: toolCallPart.Input,
+							},
+						},
+					}
+					if cacheControl != nil {
+						tc.OfFunction.SetExtraFields(map[string]any{
+							"cache_control": map[string]string{
+								"type": cacheControl.Type,
+							},
+						})
+					}
+					assistantMsg.ToolCalls = append(assistantMsg.ToolCalls, tc)
+				}
+			}
+			messages = append(messages, openaisdk.ChatCompletionMessageParamUnion{
+				OfAssistant: &assistantMsg,
+			})
+
+		case fantasy.MessageRoleTool:
+			for i, c := range msg.Content {
+				isLastPart := i == len(msg.Content)-1
+				cacheControl := anthropic.GetCacheControl(c.Options())
+				if cacheControl == nil && isLastPart {
+					cacheControl = anthropic.GetCacheControl(msg.ProviderOptions)
+				}
+				if c.GetType() != fantasy.ContentTypeToolResult {
+					warnings = append(warnings, fantasy.CallWarning{
+						Type:    fantasy.CallWarningTypeOther,
+						Message: "tool message can only have tool result content",
+					})
+					continue
+				}
+				toolResultPart, ok := fantasy.AsContentType[fantasy.ToolResultPart](c)
+				if !ok {
+					warnings = append(warnings, fantasy.CallWarning{
+						Type:    fantasy.CallWarningTypeOther,
+						Message: "tool message result part does not have the right type",
+					})
+					continue
+				}
+				switch toolResultPart.Output.GetType() {
+				case fantasy.ToolResultContentTypeText:
+					output, ok := fantasy.AsToolResultOutputType[fantasy.ToolResultOutputContentText](toolResultPart.Output)
+					if !ok {
+						warnings = append(warnings, fantasy.CallWarning{
+							Type:    fantasy.CallWarningTypeOther,
+							Message: "tool result output does not have the right type",
+						})
+						continue
+					}
+					tr := openaisdk.ToolMessage(output.Text, toolResultPart.ToolCallID)
+					if cacheControl != nil {
+						tr.SetExtraFields(map[string]any{
+							"cache_control": map[string]string{
+								"type": cacheControl.Type,
+							},
+						})
+					}
+					messages = append(messages, tr)
+				case fantasy.ToolResultContentTypeError:
+					output, ok := fantasy.AsToolResultOutputType[fantasy.ToolResultOutputContentError](toolResultPart.Output)
+					if !ok {
+						warnings = append(warnings, fantasy.CallWarning{
+							Type:    fantasy.CallWarningTypeOther,
+							Message: "tool result output does not have the right type",
+						})
+						continue
+					}
+					tr := openaisdk.ToolMessage(output.Error.Error(), toolResultPart.ToolCallID)
+					if cacheControl != nil {
+						tr.SetExtraFields(map[string]any{
+							"cache_control": map[string]string{
+								"type": cacheControl.Type,
+							},
+						})
+					}
+					messages = append(messages, tr)
+				}
+			}
+		}
+	}
+	return messages, warnings
+}
+
+func hasVisibleUserContent(content []openaisdk.ChatCompletionContentPartUnionParam) bool {
+	for _, part := range content {
+		if part.OfText != nil || part.OfImageURL != nil || part.OfInputAudio != nil || part.OfFile != nil {
+			return true
+		}
+	}
+	return false
+}
+
+func structToMapJSON(s any) (map[string]any, error) {
+	var result map[string]any
+	jsonBytes, err := json.Marshal(s)
+	if err != nil {
+		return nil, err
+	}
+	err = json.Unmarshal(jsonBytes, &result)
+	if err != nil {
+		return nil, err
+	}
+	return result, nil
+}
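
The hooks above dispatch on the reasoning_details wire format returned by the gateway ("openai-responses"/"xai-responses", "google-gemini", "anthropic-claude", or plain text). As a reference point, here is a minimal sketch of that payload shape, assuming the import path shown in this diff; the payload values themselves are made up for illustration.

package main

import (
	"encoding/json"
	"fmt"

	"charm.land/fantasy/providers/vercel"
)

func main() {
	// Hand-written payload in the shape that languageModelExtraContent and
	// languageModelStreamExtra parse; the values are illustrative only.
	raw := []byte(`{
		"reasoning": "Adding 2 and 3...",
		"reasoning_details": [
			{"type": "reasoning.text", "format": "anthropic-claude-v1", "text": "Adding 2 and 3...", "signature": "sig-placeholder", "index": 0}
		]
	}`)

	var data vercel.ReasoningData
	if err := json.Unmarshal(raw, &data); err != nil {
		panic(err)
	}
	for _, d := range data.ReasoningDetails {
		// The hooks branch on the Format prefix: "openai-responses"/"xai-responses",
		// "google-gemini", and "anthropic-claude"; anything else is emitted as
		// plain reasoning text without provider metadata.
		fmt.Printf("format=%s type=%s text=%q\n", d.Format, d.Type, d.Text)
	}
}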

providers/vercel/provider_options.go 🔗

@@ -0,0 +1,191 @@
+// Package vercel provides an implementation of the fantasy AI SDK for Vercel AI Gateway.
+package vercel
+
+import (
+	"encoding/json"
+
+	"charm.land/fantasy"
+)
+
+// Global type identifiers for Vercel-specific provider data.
+const (
+	TypeProviderOptions  = Name + ".options"
+	TypeProviderMetadata = Name + ".metadata"
+)
+
+// Register Vercel provider-specific types with the global registry.
+func init() {
+	fantasy.RegisterProviderType(TypeProviderOptions, func(data []byte) (fantasy.ProviderOptionsData, error) {
+		var v ProviderOptions
+		if err := json.Unmarshal(data, &v); err != nil {
+			return nil, err
+		}
+		return &v, nil
+	})
+	fantasy.RegisterProviderType(TypeProviderMetadata, func(data []byte) (fantasy.ProviderOptionsData, error) {
+		var v ProviderMetadata
+		if err := json.Unmarshal(data, &v); err != nil {
+			return nil, err
+		}
+		return &v, nil
+	})
+}
+
+// ReasoningEffort represents the reasoning effort level for Vercel AI Gateway.
+type ReasoningEffort string
+
+const (
+	// ReasoningEffortNone disables reasoning.
+	ReasoningEffortNone ReasoningEffort = "none"
+	// ReasoningEffortMinimal represents minimal reasoning effort (~10% of max_tokens).
+	ReasoningEffortMinimal ReasoningEffort = "minimal"
+	// ReasoningEffortLow represents low reasoning effort (~20% of max_tokens).
+	ReasoningEffortLow ReasoningEffort = "low"
+	// ReasoningEffortMedium represents medium reasoning effort (~50% of max_tokens).
+	ReasoningEffortMedium ReasoningEffort = "medium"
+	// ReasoningEffortHigh represents high reasoning effort (~80% of max_tokens).
+	ReasoningEffortHigh ReasoningEffort = "high"
+	// ReasoningEffortXHigh represents extra high reasoning effort (~95% of max_tokens).
+	ReasoningEffortXHigh ReasoningEffort = "xhigh"
+)
+
+// ReasoningOptions represents reasoning configuration for Vercel AI Gateway.
+type ReasoningOptions struct {
+	// Enabled enables reasoning output. When true, the model will provide its reasoning process.
+	Enabled *bool `json:"enabled,omitempty"`
+	// MaxTokens is the maximum number of tokens to allocate for reasoning.
+	// Cannot be used with Effort.
+	MaxTokens *int64 `json:"max_tokens,omitempty"`
+	// Effort controls reasoning effort level.
+	// Mutually exclusive with MaxTokens.
+	Effort *ReasoningEffort `json:"effort,omitempty"`
+	// Exclude excludes reasoning content from the response but still generates it internally.
+	Exclude *bool `json:"exclude,omitempty"`
+}
+
+// GatewayProviderOptions represents provider routing preferences for Vercel AI Gateway.
+type GatewayProviderOptions struct {
+	// Order is the list of provider slugs to try in order (e.g. ["vertex", "anthropic"]).
+	Order []string `json:"order,omitempty"`
+	// Models is the list of fallback models to try if the primary model fails.
+	Models []string `json:"models,omitempty"`
+}
+
+// BYOKCredential represents a single provider credential for BYOK.
+type BYOKCredential struct {
+	APIKey string `json:"apiKey,omitempty"`
+}
+
+// BYOKOptions represents Bring Your Own Key options for Vercel AI Gateway.
+type BYOKOptions struct {
+	Anthropic map[string][]BYOKCredential `json:"anthropic,omitempty"`
+	OpenAI    map[string][]BYOKCredential `json:"openai,omitempty"`
+	Vertex    map[string][]BYOKCredential `json:"vertex,omitempty"`
+	Bedrock   map[string][]BYOKCredential `json:"bedrock,omitempty"`
+}
+
+// ProviderOptions represents additional options for Vercel AI Gateway provider.
+type ProviderOptions struct {
+	// Reasoning configuration for models that support extended thinking.
+	Reasoning *ReasoningOptions `json:"reasoning,omitempty"`
+	// ProviderOptions for gateway routing preferences.
+	ProviderOptions *GatewayProviderOptions `json:"providerOptions,omitempty"`
+	// BYOK for request-scoped provider credentials.
+	BYOK *BYOKOptions `json:"byok,omitempty"`
+	// User is a unique identifier representing your end-user.
+	User *string `json:"user,omitempty"`
+	// LogitBias modifies the likelihood of specified tokens appearing in the completion.
+	LogitBias map[string]int64 `json:"logit_bias,omitempty"`
+	// LogProbs returns the log probabilities of the tokens.
+	LogProbs *bool `json:"logprobs,omitempty"`
+	// TopLogProbs is the number of top log probabilities to return.
+	TopLogProbs *int64 `json:"top_logprobs,omitempty"`
+	// ParallelToolCalls enables parallel function calling during tool use.
+	ParallelToolCalls *bool `json:"parallel_tool_calls,omitempty"`
+	// ExtraBody for additional request body fields.
+	ExtraBody map[string]any `json:"extra_body,omitempty"`
+}
+
+// Options implements the ProviderOptionsData interface for ProviderOptions.
+func (*ProviderOptions) Options() {}
+
+// MarshalJSON implements custom JSON marshaling with type info for ProviderOptions.
+func (o ProviderOptions) MarshalJSON() ([]byte, error) {
+	type plain ProviderOptions
+	return fantasy.MarshalProviderType(TypeProviderOptions, plain(o))
+}
+
+// UnmarshalJSON implements custom JSON unmarshaling with type info for ProviderOptions.
+func (o *ProviderOptions) UnmarshalJSON(data []byte) error {
+	type plain ProviderOptions
+	var p plain
+	if err := fantasy.UnmarshalProviderType(data, &p); err != nil {
+		return err
+	}
+	*o = ProviderOptions(p)
+	return nil
+}
+
+// ProviderMetadata represents metadata from Vercel AI Gateway provider.
+type ProviderMetadata struct {
+	Provider string `json:"provider,omitempty"`
+}
+
+// Options implements the ProviderOptionsData interface for ProviderMetadata.
+func (*ProviderMetadata) Options() {}
+
+// MarshalJSON implements custom JSON marshaling with type info for ProviderMetadata.
+func (m ProviderMetadata) MarshalJSON() ([]byte, error) {
+	type plain ProviderMetadata
+	return fantasy.MarshalProviderType(TypeProviderMetadata, plain(m))
+}
+
+// UnmarshalJSON implements custom JSON unmarshaling with type info for ProviderMetadata.
+func (m *ProviderMetadata) UnmarshalJSON(data []byte) error {
+	type plain ProviderMetadata
+	var p plain
+	if err := fantasy.UnmarshalProviderType(data, &p); err != nil {
+		return err
+	}
+	*m = ProviderMetadata(p)
+	return nil
+}
+
+// ReasoningDetail represents a reasoning detail from Vercel AI Gateway.
+type ReasoningDetail struct {
+	ID        string `json:"id,omitempty"`
+	Type      string `json:"type,omitempty"`
+	Text      string `json:"text,omitempty"`
+	Data      string `json:"data,omitempty"`
+	Format    string `json:"format,omitempty"`
+	Summary   string `json:"summary,omitempty"`
+	Signature string `json:"signature,omitempty"`
+	Index     int    `json:"index"`
+}
+
+// ReasoningData represents reasoning data from Vercel AI Gateway response.
+type ReasoningData struct {
+	Reasoning        string            `json:"reasoning,omitempty"`
+	ReasoningDetails []ReasoningDetail `json:"reasoning_details,omitempty"`
+}
+
+// ReasoningEffortOption creates a pointer to a ReasoningEffort value.
+func ReasoningEffortOption(e ReasoningEffort) *ReasoningEffort {
+	return &e
+}
+
+// NewProviderOptions creates new provider options for Vercel.
+func NewProviderOptions(opts *ProviderOptions) fantasy.ProviderOptions {
+	return fantasy.ProviderOptions{
+		Name: opts,
+	}
+}
+
+// ParseOptions parses provider options from a map for Vercel.
+func ParseOptions(data map[string]any) (*ProviderOptions, error) {
+	var options ProviderOptions
+	if err := fantasy.ParseOptions(data, &options); err != nil {
+		return nil, err
+	}
+	return &options, nil
+}
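
For reference, a small sketch of wiring these options together with the exported API above; the provider order, model ID, and effort level are illustrative values taken from the doc comments and test fixtures, not required settings.

package main

import (
	"fmt"

	"charm.land/fantasy/providers/vercel"
)

func main() {
	enabled := true

	// NewProviderOptions keys the options under the provider name so the
	// prepare-call hook can look them up via call.ProviderOptions.
	opts := vercel.NewProviderOptions(&vercel.ProviderOptions{
		Reasoning: &vercel.ReasoningOptions{
			Enabled: &enabled,
			Effort:  vercel.ReasoningEffortOption(vercel.ReasoningEffortHigh),
		},
		ProviderOptions: &vercel.GatewayProviderOptions{
			Order:  []string{"vertex", "anthropic"},
			Models: []string{"anthropic/claude-sonnet-4"},
		},
	})

	fmt.Printf("%T\n", opts[vercel.Name])
}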

providers/vercel/vercel.go 🔗

@@ -0,0 +1,114 @@
+// Package vercel provides an implementation of the fantasy AI SDK for Vercel AI Gateway.
+package vercel
+
+import (
+	"charm.land/fantasy"
+	"charm.land/fantasy/providers/openai"
+	"github.com/openai/openai-go/v2/option"
+)
+
+type options struct {
+	openaiOptions        []openai.Option
+	languageModelOptions []openai.LanguageModelOption
+	sdkOptions           []option.RequestOption
+	objectMode           fantasy.ObjectMode
+}
+
+const (
+	// DefaultURL is the default URL for the Vercel AI Gateway API.
+	DefaultURL = "https://ai-gateway.vercel.sh/v1"
+	// Name is the name of the Vercel provider.
+	Name = "vercel"
+)
+
+// Option defines a function that configures Vercel provider options.
+type Option = func(*options)
+
+// New creates a new Vercel AI Gateway provider with the given options.
+func New(opts ...Option) (fantasy.Provider, error) {
+	providerOptions := options{
+		openaiOptions: []openai.Option{
+			openai.WithName(Name),
+			openai.WithBaseURL(DefaultURL),
+		},
+		languageModelOptions: []openai.LanguageModelOption{
+			openai.WithLanguageModelPrepareCallFunc(languagePrepareModelCall),
+			openai.WithLanguageModelUsageFunc(languageModelUsage),
+			openai.WithLanguageModelStreamUsageFunc(languageModelStreamUsage),
+			openai.WithLanguageModelStreamExtraFunc(languageModelStreamExtra),
+			openai.WithLanguageModelExtraContentFunc(languageModelExtraContent),
+			openai.WithLanguageModelToPromptFunc(languageModelToPrompt),
+		},
+		objectMode: fantasy.ObjectModeTool, // Default to tool mode for vercel
+	}
+	for _, o := range opts {
+		o(&providerOptions)
+	}
+
+	// Handle object mode: convert unsupported modes to tool
+	// Vercel AI Gateway doesn't support native JSON mode, so we use tool or text
+	objectMode := providerOptions.objectMode
+	if objectMode == fantasy.ObjectModeAuto || objectMode == fantasy.ObjectModeJSON {
+		objectMode = fantasy.ObjectModeTool
+	}
+
+	providerOptions.openaiOptions = append(
+		providerOptions.openaiOptions,
+		openai.WithSDKOptions(providerOptions.sdkOptions...),
+		openai.WithLanguageModelOptions(providerOptions.languageModelOptions...),
+		openai.WithObjectMode(objectMode),
+	)
+	return openai.New(providerOptions.openaiOptions...)
+}
+
+// WithAPIKey sets the API key for the Vercel provider.
+func WithAPIKey(apiKey string) Option {
+	return func(o *options) {
+		o.openaiOptions = append(o.openaiOptions, openai.WithAPIKey(apiKey))
+	}
+}
+
+// WithBaseURL sets the base URL for the Vercel provider.
+func WithBaseURL(url string) Option {
+	return func(o *options) {
+		o.openaiOptions = append(o.openaiOptions, openai.WithBaseURL(url))
+	}
+}
+
+// WithName sets the name for the Vercel provider.
+func WithName(name string) Option {
+	return func(o *options) {
+		o.openaiOptions = append(o.openaiOptions, openai.WithName(name))
+	}
+}
+
+// WithHeaders sets the headers for the Vercel provider.
+func WithHeaders(headers map[string]string) Option {
+	return func(o *options) {
+		o.openaiOptions = append(o.openaiOptions, openai.WithHeaders(headers))
+	}
+}
+
+// WithHTTPClient sets the HTTP client for the Vercel provider.
+func WithHTTPClient(client option.HTTPClient) Option {
+	return func(o *options) {
+		o.openaiOptions = append(o.openaiOptions, openai.WithHTTPClient(client))
+	}
+}
+
+// WithSDKOptions sets the SDK options for the Vercel provider.
+func WithSDKOptions(opts ...option.RequestOption) Option {
+	return func(o *options) {
+		o.sdkOptions = append(o.sdkOptions, opts...)
+	}
+}
+
+// WithObjectMode sets the object generation mode for the Vercel provider.
+// Supported modes: ObjectModeTool, ObjectModeText.
+// ObjectModeAuto and ObjectModeJSON are automatically converted to ObjectModeTool
+// since Vercel AI Gateway doesn't support native JSON mode.
+func WithObjectMode(om fantasy.ObjectMode) Option {
+	return func(o *options) {
+		o.objectMode = om
+	}
+}
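
A minimal construction sketch against the options above; the environment variable name is an assumption for illustration only, and the gateway-style model ID matches the test fixtures below. Obtaining a language model from the returned fantasy.Provider is outside this change, so it is omitted here.

package main

import (
	"log"
	"os"

	"charm.land/fantasy"
	"charm.land/fantasy/providers/vercel"
)

func main() {
	// The env var name is illustrative; the provider only sees whatever
	// key is passed via WithAPIKey.
	provider, err := vercel.New(
		vercel.WithAPIKey(os.Getenv("AI_GATEWAY_API_KEY")),
		vercel.WithObjectMode(fantasy.ObjectModeTool), // Auto/JSON are coerced to Tool anyway
	)
	if err != nil {
		log.Fatal(err)
	}

	// Models are addressed with gateway IDs such as "anthropic/claude-sonnet-4".
	_ = provider
}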

providertests/testdata/TestVercelCommon/claude-sonnet-4/multi_tool.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 830
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant. CRITICAL: Always use both add and multiply at the same time ALWAYS.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"anthropic/claude-sonnet-4","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}]}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/claude-sonnet-4/multi_tool_streaming.yaml 🔗

@@ -0,0 +1,113 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 867
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant. Always use both add and multiply at the same time.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"anthropic/claude-sonnet-4","max_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"I"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"'ll add an"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"d multiply the"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" numbers 2 and 3 "},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"for you."},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"toolu_0135npjMLttuPFBue5jmkrQ7","type":"function","function":{"name":"add","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"a\": 2"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":", \""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"b\": 3}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"toolu_0158fwcmJxxApeN8LydKZFKq","type":"function","function":{"name":"multiply","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"a\": 2"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":", \"b"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+
+      data: {"id":"gen_01KGF37RMDX4SKJG4WVQMM9169","object":"chat.completion.chunk","created":1770033244,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"\": 3}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_hia2ehn86q"}
+

providertests/testdata/TestVercelCommon/claude-sonnet-4/simple.yaml 🔗

@@ -0,0 +1,33 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 175
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"Say hi in Portuguese","role":"user"}],"model":"anthropic/claude-sonnet-4","max_tokens":4000}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/claude-sonnet-4/simple_streaming.yaml 🔗

@@ -0,0 +1,62 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 229
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"Say hi in Portuguese","role":"user"}],"model":"anthropic/claude-sonnet-4","max_tokens":4000,"stream_options":{"include_usage":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"Oi!"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" "},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"\n\n("},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"You"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" also say \"Olá!\" which"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" is"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" another"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" common way"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" to say hi"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" in"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+
+      data: {"id":"gen_01KGF379703450NEHTHYWRXNWC","object":"chat.completion.chunk","created":1770033227,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" Portuguese)"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5a0fyewqxl"}
+

providertests/testdata/TestVercelCommon/claude-sonnet-4/tool.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 467
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"}],"model":"anthropic/claude-sonnet-4","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}]}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/claude-sonnet-4/tool_streaming.yaml 🔗

@@ -0,0 +1,121 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 521
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"}],"model":"anthropic/claude-sonnet-4","max_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF37F6TEH1JWRE2BABW1XV7","object":"chat.completion.chunk","created":1770033234,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_22ov8ctw6a"}
+
+      data: {"id":"gen_01KGF37F6TEH1JWRE2BABW1XV7","object":"chat.completion.chunk","created":1770033234,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"I'll get the weather information for Florence"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_22ov8ctw6a"}
+
+      data: {"id":"gen_01KGF37F6TEH1JWRE2BABW1XV7","object":"chat.completion.chunk","created":1770033234,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":", Italy for you."},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_22ov8ctw6a"}
+
+      data: {"id":"gen_01KGF37F6TEH1JWRE2BABW1XV7","object":"chat.completion.chunk","created":1770033234,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"toolu_01P61EhnucuCA3gRE8rXpYrt","type":"function","function":{"name":"weather","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_22ov8ctw6a"}
+
+      data: {"id":"gen_01KGF37F6TEH1JWRE2BABW1XV7","object":"chat.completion.chunk","created":1770033234,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"location\""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_22ov8ctw6a"}
+
+      data: {"id":"gen_01KGF37F6TEH1JWRE2BABW1XV7","object":"chat.completion.chunk","created":1770033234,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":": \""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_22ov8ctw6a"}
+
+      data: {"id":"gen_01KGF37F6TEH1JWRE2BABW1XV7","object":"chat.completion.chunk","created":1770033234,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Florence,"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_22ov8ctw6a"}
+
+      data: {"id":"gen_01KGF37F6TEH1JWRE2BABW1XV7","object":"chat.completion.chunk","created":1770033234,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" Ita"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_22ov8ctw6a"}
+
+      data: {"id":"gen_01KGF37F6TEH1JWRE2BABW1XV7","object":"chat.completion.chunk","created":1770033234,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"ly\"}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_22ov8ctw6a"}
+

providertests/testdata/TestVercelCommon/gemini-2.5-flash/multi_tool.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 828
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant. CRITICAL: Always use both add and multiply at the same time ALWAYS.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"google/gemini-2.5-flash","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}]}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/gemini-2.5-flash/multi_tool_streaming.yaml 🔗

@@ -0,0 +1,87 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 865
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant. Always use both add and multiply at the same time.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"google/gemini-2.5-flash","max_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF383DF2E9FSJX95AA3ZED7","object":"chat.completion.chunk","created":1770033254,"model":"google/gemini-2.5-flash","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_nkvitg7moc"}
+
+      data: {"id":"gen_01KGF383DF2E9FSJX95AA3ZED7","object":"chat.completion.chunk","created":1770033254,"model":"google/gemini-2.5-flash","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"S66o9rnUIliVpcHx","type":"function","function":{"name":"add","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_nkvitg7moc"}
+
+      data: {"id":"gen_01KGF383DF2E9FSJX95AA3ZED7","object":"chat.completion.chunk","created":1770033254,"model":"google/gemini-2.5-flash","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"b\":3,\"a\":2}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_nkvitg7moc"}
+
+      data: {"id":"gen_01KGF383DF2E9FSJX95AA3ZED7","object":"chat.completion.chunk","created":1770033254,"model":"google/gemini-2.5-flash","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"6e0NXuEtzAHkTRep","type":"function","function":{"name":"multiply","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_nkvitg7moc"}
+
+      data: {"id":"gen_01KGF383DF2E9FSJX95AA3ZED7","object":"chat.completion.chunk","created":1770033254,"model":"google/gemini-2.5-flash","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"a\":2,\"b\":3}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_nkvitg7moc"}
+

providertests/testdata/TestVercelCommon/gemini-2.5-flash/simple.yaml 🔗

@@ -0,0 +1,33 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 173
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"Say hi in Portuguese","role":"user"}],"model":"google/gemini-2.5-flash","max_tokens":4000}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/gemini-2.5-flash/simple_streaming.yaml 🔗

@@ -0,0 +1,40 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 227
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"Say hi in Portuguese","role":"user"}],"model":"google/gemini-2.5-flash","max_tokens":4000,"stream_options":{"include_usage":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF37XVPQFTEF88B3MN5VGHK","object":"chat.completion.chunk","created":1770033248,"model":"google/gemini-2.5-flash","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_1hjld5wcik"}
+
+      data: {"id":"gen_01KGF37XVPQFTEF88B3MN5VGHK","object":"chat.completion.chunk","created":1770033248,"model":"google/gemini-2.5-flash","choices":[{"index":0,"delta":{"content":"Olá!"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_1hjld5wcik"}
+

providertests/testdata/TestVercelCommon/gemini-2.5-flash/tool.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 465
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"}],"model":"google/gemini-2.5-flash","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}]}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/gemini-2.5-flash/tool_streaming.yaml 🔗

@@ -0,0 +1,81 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 519
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"}],"model":"google/gemini-2.5-flash","max_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF3803YBNDA2M8B7319DJ82","object":"chat.completion.chunk","created":1770033251,"model":"google/gemini-2.5-flash","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_7in70gabwf"}
+
+      data: {"id":"gen_01KGF3803YBNDA2M8B7319DJ82","object":"chat.completion.chunk","created":1770033251,"model":"google/gemini-2.5-flash","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"g8BRNPzm2XAHjElR","type":"function","function":{"name":"weather","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_7in70gabwf"}
+
+      data: {"id":"gen_01KGF3803YBNDA2M8B7319DJ82","object":"chat.completion.chunk","created":1770033251,"model":"google/gemini-2.5-flash","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"location\":\"Florence,Italy\"}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_7in70gabwf"}
+

providertests/testdata/TestVercelCommon/gemini-3-pro-preview/multi_tool.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 832
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant. CRITICAL: Always use both add and multiply at the same time ALWAYS.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"google/gemini-3-pro-preview","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}]}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/gemini-3-pro-preview/multi_tool_streaming.yaml 🔗

@@ -0,0 +1,85 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 869
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant. Always use both add and multiply at the same time.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"google/gemini-3-pro-preview","max_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF39SRSQ5CWVF17GR4P29CK","object":"chat.completion.chunk","created":1770033312,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_4r8izhsq38"}
+
+      data: {"id":"gen_01KGF39SRSQ5CWVF17GR4P29CK","object":"chat.completion.chunk","created":1770033312,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"Oxu2MENhE0ZsrlGm","type":"function","function":{"name":"add","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_4r8izhsq38"}
+
+      data: {"id":"gen_01KGF39SRSQ5CWVF17GR4P29CK","object":"chat.completion.chunk","created":1770033312,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"b\":3,\"a\":2}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_4r8izhsq38"}
+
+      data: {"id":"gen_01KGF39SRSQ5CWVF17GR4P29CK","object":"chat.completion.chunk","created":1770033312,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"jHpVWOzg44py3a4h","type":"function","function":{"name":"multiply","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_4r8izhsq38"}
+
+      data: {"id":"gen_01KGF39SRSQ5CWVF17GR4P29CK","object":"chat.completion.chunk","created":1770033312,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"b\":3,\"a\":2}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_4r8izhsq38"}
+

providertests/testdata/TestVercelCommon/gemini-3-pro-preview/simple.yaml 🔗

@@ -0,0 +1,33 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 177
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"Say hi in Portuguese","role":"user"}],"model":"google/gemini-3-pro-preview","max_tokens":4000}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/gemini-3-pro-preview/simple_streaming.yaml 🔗

@@ -0,0 +1,42 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 231
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"Say hi in Portuguese","role":"user"}],"model":"google/gemini-3-pro-preview","max_tokens":4000,"stream_options":{"include_usage":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF391Q1MADSMMMS7RN0MPJX","object":"chat.completion.chunk","created":1770033289,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_s005gr6r3v"}
+
+      data: {"id":"gen_01KGF391Q1MADSMMMS7RN0MPJX","object":"chat.completion.chunk","created":1770033289,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"content":"The most common way to say \"hi\" in Portuguese is **\"Oi\"**.\n\nYou can also say **\"Olá\"** (which is more like \"hello\")."},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_s005gr6r3v"}
+

providertests/testdata/TestVercelCommon/gemini-3-pro-preview/tool.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 469
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"}],"model":"google/gemini-3-pro-preview","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}]}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/gemini-3-pro-preview/tool_streaming.yaml 🔗

@@ -0,0 +1,81 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 523
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"}],"model":"google/gemini-3-pro-preview","max_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF39CMWTBAYWG62BTDA8EX5","object":"chat.completion.chunk","created":1770033298,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_n9t24sd6so"}
+
+      data: {"id":"gen_01KGF39CMWTBAYWG62BTDA8EX5","object":"chat.completion.chunk","created":1770033298,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"mbK8hEFBfrxbyEg3","type":"function","function":{"name":"weather","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_n9t24sd6so"}
+
+      data: {"id":"gen_01KGF39CMWTBAYWG62BTDA8EX5","object":"chat.completion.chunk","created":1770033298,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"location\":\"Florence, Italy\"}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_n9t24sd6so"}
+

providertests/testdata/TestVercelCommon/gpt-5/multi_tool.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 828
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant. CRITICAL: Always use both add and multiply at the same time ALWAYS.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"openai/gpt-5","max_completion_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}]}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/gpt-5/multi_tool_streaming.yaml 🔗

@@ -0,0 +1,101 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 865
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant. Always use both add and multiply at the same time.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"openai/gpt-5","max_completion_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF38QHY01RX510GTJVENPPB","object":"chat.completion.chunk","created":1770033274,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_lwams5xjm0"}
+

providertests/testdata/TestVercelCommon/gpt-5/simple.yaml 🔗

@@ -0,0 +1,33 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 173
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"Say hi in Portuguese","role":"user"}],"model":"openai/gpt-5","max_completion_tokens":4000}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/gpt-5/simple_streaming.yaml 🔗

@@ -0,0 +1,44 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 227
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"Say hi in Portuguese","role":"user"}],"model":"openai/gpt-5","max_completion_tokens":4000,"stream_options":{"include_usage":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF387XY51D0GDDRCM6KB8NB","object":"chat.completion.chunk","created":1770033258,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_tmpp8eb0yq"}
+

providertests/testdata/TestVercelCommon/gpt-5/tool.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 465
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"}],"model":"openai/gpt-5","max_completion_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}]}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommon/gpt-5/tool_streaming.yaml 🔗

@@ -0,0 +1,119 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 519
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"}],"model":"openai/gpt-5","max_completion_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF38FRCNWV564Q8599HRRAW","object":"chat.completion.chunk","created":1770033266,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_147br1mgel"}
+

providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/multi_tool.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 904
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant. CRITICAL: Always use both add and multiply at the same time ALWAYS.","role":"system","cache_control":{"type":"ephemeral"}},{"content":"Add and multiply the number 2 and 3","role":"user","cache_control":{"type":"ephemeral"}}],"model":"anthropic/claude-sonnet-4","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}]}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/multi_tool_streaming.yaml 🔗

@@ -0,0 +1,119 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 941
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant. Always use both add and multiply at the same time.","role":"system","cache_control":{"type":"ephemeral"}},{"content":"Add and multiply the number 2 and 3","role":"user","cache_control":{"type":"ephemeral"}}],"model":"anthropic/claude-sonnet-4","max_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"I"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"'ll"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" ad"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"d an"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"d multiply the"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" numbers 2 and 3 "},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"for you."},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"toolu_01EbUPM9AHLKandFdpD3hR2d","type":"function","function":{"name":"add","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"a\": 2"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":", \"b\""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":": 3}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"toolu_01WJxb3WgmL33DJCq2hw4DFL","type":"function","function":{"name":"multiply","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"a\": "}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"2"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":", "}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"\"b\""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+
+      data: {"id":"gen_01KGF3AK848QW52MG188A9XZRA","object":"chat.completion.chunk","created":1770033336,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":": 3}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_kk4h3svyf"}
+

providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/simple.yaml 🔗

@@ -0,0 +1,33 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 249
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system","cache_control":{"type":"ephemeral"}},{"content":"Say hi in Portuguese","role":"user","cache_control":{"type":"ephemeral"}}],"model":"anthropic/claude-sonnet-4","max_tokens":4000}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/simple_streaming.yaml 🔗

@@ -0,0 +1,60 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 303
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system","cache_control":{"type":"ephemeral"}},{"content":"Say hi in Portuguese","role":"user","cache_control":{"type":"ephemeral"}}],"model":"anthropic/claude-sonnet-4","max_tokens":4000,"stream_options":{"include_usage":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"Olá! "},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"\n\n("},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"That"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"'s \""},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"hello"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"\" in Portuguese -"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" friendly"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" way"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" to say hi"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+
+      data: {"id":"gen_01KGF3A2KGQFE5BW4K7TRSZ9DF","object":"chat.completion.chunk","created":1770033319,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"!)"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_c0bnftwhqr"}
+

providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/tool.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 541
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system","cache_control":{"type":"ephemeral"}},{"content":"What''s the weather in Florence,Italy?","role":"user","cache_control":{"type":"ephemeral"}}],"model":"anthropic/claude-sonnet-4","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}]}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelCommonWithAnthropicCache/claude-sonnet-4/tool_streaming.yaml 🔗

@@ -0,0 +1,117 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 595
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system","cache_control":{"type":"ephemeral"}},{"content":"What''s the weather in Florence,Italy?","role":"user","cache_control":{"type":"ephemeral"}}],"model":"anthropic/claude-sonnet-4","max_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF3A9P5RW4MNWQD4RZQY7FA","object":"chat.completion.chunk","created":1770033327,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_afeigrmiv8"}
+
+      data: {"id":"gen_01KGF3A9P5RW4MNWQD4RZQY7FA","object":"chat.completion.chunk","created":1770033327,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":"I'll get"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_afeigrmiv8"}
+
+      data: {"id":"gen_01KGF3A9P5RW4MNWQD4RZQY7FA","object":"chat.completion.chunk","created":1770033327,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" the weather information"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_afeigrmiv8"}
+
+      data: {"id":"gen_01KGF3A9P5RW4MNWQD4RZQY7FA","object":"chat.completion.chunk","created":1770033327,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"content":" for Florence, Italy for you."},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_afeigrmiv8"}
+
+      data: {"id":"gen_01KGF3A9P5RW4MNWQD4RZQY7FA","object":"chat.completion.chunk","created":1770033327,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"toolu_01NEeARJCxoJWc8F8yKkyTK2","type":"function","function":{"name":"weather","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_afeigrmiv8"}
+
+      data: {"id":"gen_01KGF3A9P5RW4MNWQD4RZQY7FA","object":"chat.completion.chunk","created":1770033327,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"l"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_afeigrmiv8"}
+
+      data: {"id":"gen_01KGF3A9P5RW4MNWQD4RZQY7FA","object":"chat.completion.chunk","created":1770033327,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"ocati"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_afeigrmiv8"}
+
+      data: {"id":"gen_01KGF3A9P5RW4MNWQD4RZQY7FA","object":"chat.completion.chunk","created":1770033327,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"on\": \"Flore"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_afeigrmiv8"}
+
+      data: {"id":"gen_01KGF3A9P5RW4MNWQD4RZQY7FA","object":"chat.completion.chunk","created":1770033327,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"nce,Italy\"}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_afeigrmiv8"}
+

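The streaming cassettes above show the gateway delivering a tool call in pieces: one chunk carries the call `id` and function `name`, and later chunks append raw fragments of the JSON `arguments` string (`{"l`, `ocati`, `on": "Flore`, ...). A minimal Go sketch of how a consumer could fold those deltas back into a complete call is below. Only the JSON keys mirror the recorded chunks; the type and function names (`chunk`, `toolCall`, `accumulate`) are hypothetical and this is not the provider's implementation.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// chunk models only the parts of a chat.completion.chunk used in this sketch.
type chunk struct {
	Choices []struct {
		Delta struct {
			ToolCalls []struct {
				Index    int    `json:"index"`
				ID       string `json:"id"`
				Function struct {
					Name      string `json:"name"`
					Arguments string `json:"arguments"`
				} `json:"function"`
			} `json:"tool_calls"`
		} `json:"delta"`
	} `json:"choices"`
}

// toolCall is the accumulated result per tool_calls index.
type toolCall struct {
	ID, Name, Arguments string
}

// accumulate folds streamed deltas into complete tool calls, keyed by index:
// the id/name arrive once, argument fragments are concatenated in order.
func accumulate(datas []string) map[int]*toolCall {
	calls := map[int]*toolCall{}
	for _, d := range datas {
		var c chunk
		if err := json.Unmarshal([]byte(d), &c); err != nil {
			continue // skip malformed lines in this sketch
		}
		for _, ch := range c.Choices {
			for _, tc := range ch.Delta.ToolCalls {
				call, ok := calls[tc.Index]
				if !ok {
					call = &toolCall{}
					calls[tc.Index] = call
				}
				if tc.ID != "" {
					call.ID = tc.ID
				}
				if tc.Function.Name != "" {
					call.Name = tc.Function.Name
				}
				call.Arguments += tc.Function.Arguments
			}
		}
	}
	return calls
}

func main() {
	// Argument fragments shaped like the ones recorded above.
	datas := []string{
		`{"choices":[{"delta":{"tool_calls":[{"index":0,"id":"toolu_01","function":{"name":"weather","arguments":""}}]}}]}`,
		`{"choices":[{"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"locati"}}]}}]}`,
		`{"choices":[{"delta":{"tool_calls":[{"index":0,"function":{"arguments":"on\": \"Florence,Italy\"}"}}]}}]}`,
	}
	for i, call := range accumulate(datas) {
		fmt.Printf("%d: %s %s(%s)\n", i, call.ID, call.Name, call.Arguments)
	}
}
```
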
providertests/testdata/TestVercelThinking/claude-sonnet-4-sig/thinking-streaming.yaml 🔗

@@ -0,0 +1,137 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 533
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence, Italy?","role":"user"}],"model":"anthropic/claude-sonnet-4","stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"reasoning":{"enabled":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":"The user is asking for","reasoning_details":[{"type":"reasoning.text","text":"The user is asking for","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" weather","reasoning_details":[{"type":"reasoning.text","text":" weather","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" information for Florence, Italy. I have","reasoning_details":[{"type":"reasoning.text","text":" information for Florence, Italy. I have","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" a","reasoning_details":[{"type":"reasoning.text","text":" a","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" weather","reasoning_details":[{"type":"reasoning.text","text":" weather","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" function","reasoning_details":[{"type":"reasoning.text","text":" function","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" available that takes","reasoning_details":[{"type":"reasoning.text","text":" available that takes","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" a location parameter.","reasoning_details":[{"type":"reasoning.text","text":" a location parameter.","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" The","reasoning_details":[{"type":"reasoning.text","text":" The","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" location","reasoning_details":[{"type":"reasoning.text","text":" location","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" is","reasoning_details":[{"type":"reasoning.text","text":" is","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" clearly","reasoning_details":[{"type":"reasoning.text","text":" clearly","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" specified as \"Florence, Italy\" so","reasoning_details":[{"type":"reasoning.text","text":" specified as \"Florence, Italy\" so","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" I have","reasoning_details":[{"type":"reasoning.text","text":" I have","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" all the required parameters to","reasoning_details":[{"type":"reasoning.text","text":" all the required parameters to","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" make the function call.","reasoning_details":[{"type":"reasoning.text","text":" make the function call.","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":"","reasoning_details":[{"type":"reasoning.text","text":"","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":"","reasoning_details":[{"type":"reasoning.text","text":"","signature":"EqUDCkYICxgCKkB94vhGG6xKjNnGimHMfRxMKRoDnfxu8X0ubqexC/CZ0gU1q1TlbdRN/J4R0tWVpSFl6fFlfjxBILkP/+4vk0rsEgwiokH4mommeOQbO0kaDDeTcsDepWKtbzn7RCIwM9YXOvgQOWNhGfjjNVXcLf20mCH4hUPTSpIp3F2WEOxI0CLOTKxuwGTgRB4axXhdKowCQwf95zMeALPR1ZZHRXqVm3Dm8Lf9W0kR2/G/irnQLoTR/lxZ5gB6FqUdPqNJHXmn3r6KxrQlxTM97UEZ0hEZHhBvwLk24e4KQitX5oHkNp+XMFCzjaym/Pryfjo4qxde8drr9TmHH1ajVYHkXue2L/RWEDrbUWITgtUP0CgkbAGijhpdJm4xVIMLHOD2cJS2+gChTu85uzdvdowlgHwMB7G5rHR+c1NB/2BV9iwhgVMgVjZlGB0SZmUoPEvPr2ZW2Y56tpnQ5d5zl4GdxImJBTSzoQzEKoGxt94MsXE207I0sp5L0Zzm+p4bDGTU6V0SI+eumemcejIt9O/jlGFFLf539LcU1+qeMWFdpBgB","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"toolu_01YVA3v6AcXDxaGjsKS8DiUq","type":"function","function":{"name":"weather","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"locati"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"on\": \"Flore"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"nce, Ital"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+
+      data: {"id":"gen_01KGF3CMBNWVQFHD13H2B0XSBK","object":"chat.completion.chunk","created":1770033404,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"y\"}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_iqjm7xgvli"}
+

providertests/testdata/TestVercelThinking/claude-sonnet-4-sig/thinking.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 479
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence, Italy?","role":"user"}],"model":"anthropic/claude-sonnet-4","tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"reasoning":{"enabled":true}}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelThinking/claude-sonnet-4/thinking-streaming.yaml 🔗

@@ -0,0 +1,171 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 533
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence, Italy?","role":"user"}],"model":"anthropic/claude-sonnet-4","stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"reasoning":{"enabled":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":"The user is asking for","reasoning_details":[{"type":"reasoning.text","text":"The user is asking for","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" weather","reasoning_details":[{"type":"reasoning.text","text":" weather","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" information for Florence, Italy. I have","reasoning_details":[{"type":"reasoning.text","text":" information for Florence, Italy. I have","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" a weather","reasoning_details":[{"type":"reasoning.text","text":" a weather","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" function available","reasoning_details":[{"type":"reasoning.text","text":" function available","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" that takes","reasoning_details":[{"type":"reasoning.text","text":" that takes","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" a location parameter.","reasoning_details":[{"type":"reasoning.text","text":" a location parameter.","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" The","reasoning_details":[{"type":"reasoning.text","text":" The","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" user has","reasoning_details":[{"type":"reasoning.text","text":" user has","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" provided the location as","reasoning_details":[{"type":"reasoning.text","text":" provided the location as","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" \"Florence, Italy\" which","reasoning_details":[{"type":"reasoning.text","text":" \"Florence, Italy\" which","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" is specific","reasoning_details":[{"type":"reasoning.text","text":" is specific","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" enough for","reasoning_details":[{"type":"reasoning.text","text":" enough for","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" the weather function.","reasoning_details":[{"type":"reasoning.text","text":" the weather function.","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" I have","reasoning_details":[{"type":"reasoning.text","text":" I have","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" all the required parameters to make the function","reasoning_details":[{"type":"reasoning.text","text":" all the required parameters to make the function","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":" call.","reasoning_details":[{"type":"reasoning.text","text":" call.","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+
+      data: {"id":"gen_01KGF3AVVKYFARGPVSM4MZY14H","object":"chat.completion.chunk","created":1770033348,"model":"anthropic/claude-sonnet-4","choices":[{"index":0,"delta":{"reasoning":"","reasoning_details":[{"type":"reasoning.text","text":"","signature":"","format":"anthropic-claude-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_moyu1cwxfr"}
+

providertests/testdata/TestVercelThinking/claude-sonnet-4/thinking.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 479
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence, Italy?","role":"user"}],"model":"anthropic/claude-sonnet-4","tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"reasoning":{"enabled":true}}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelThinking/gemini-3-pro-preview/thinking-streaming.yaml 🔗

@@ -0,0 +1,87 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 535
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence, Italy?","role":"user"}],"model":"google/gemini-3-pro-preview","stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"reasoning":{"enabled":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF3C0Q49GHB7YJE7PKYYXEK","object":"chat.completion.chunk","created":1770033385,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_q2vuvpzyv8"}
+
+      data: {"id":"gen_01KGF3C0Q49GHB7YJE7PKYYXEK","object":"chat.completion.chunk","created":1770033385,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"reasoning":"**Analyzing the User's Query**\n\nI'm focused on understanding the user's intent. My primary task is to identify the location for the weather query. It's evident the user wants weather data for Florence, Italy. I'll utilize the `weather` tool and set the `location` parameter accordingly.\n\n\n","reasoning_details":[{"type":"reasoning.text","text":"**Analyzing the User's Query**\n\nI'm focused on understanding the user's intent. My primary task is to identify the location for the weather query. It's evident the user wants weather data for Florence, Italy. I'll utilize the `weather` tool and set the `location` parameter accordingly.\n\n\n","format":"google-gemini-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_q2vuvpzyv8"}
+
+      data: {"id":"gen_01KGF3C0Q49GHB7YJE7PKYYXEK","object":"chat.completion.chunk","created":1770033385,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"SmUJTLmrRxi0lu0v","type":"function","function":{"name":"weather","arguments":""}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_q2vuvpzyv8"}
+
+      data: {"id":"gen_01KGF3C0Q49GHB7YJE7PKYYXEK","object":"chat.completion.chunk","created":1770033385,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"location\":\"Florence, Italy\"}"}}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_q2vuvpzyv8"}
+
+      data: {"id":"gen_01KGF3C0Q49GHB7YJE7PKYYXEK","object":"chat.completion.chunk","created":1770033385,"model":"google/gemini-3-pro-preview","choices":[{"index":0,"delta":{"reasoning_details":[{"type":"reasoning.encrypted","data":"CiQBjz1rXzHrIG8VyKxoyvNfn34I0fU0MFB3Xbo4yGmi5lXxz9oKZgGPPWtfH7TJlamiXjXsugVyW8VxgOUJprUjmiUwJi/stNqMS3LooRoUA1fBkdgztTDDIGWmRMXQtWqouGUTAeueCke7VJzsJm3GwC5Czg9laR24rLZ/AU+1oJpvT5AmZHm0AYnOGAp3AY89a1+1SkejrgsX83pkwVBjFrYRepT+UEelh8Kexyj/KndelV/yYbA+gWgGGSMKe4zq3JKh99Toc1/OqRXykl9qKTNXZj1xaIkP98KH/4mRnVw4Mvc3mE2R97bmzKTFSxzAlJ7+XFtYnZjg7PiNxuwEiZIlG3kKNAGPPWtfzlrIZvrYjj2PtuKOaHbd5iP/2u4dRMmmPZ8fi8DHZdSpnDnJAKI/TVQikZmdBZo=","format":"google-gemini-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_q2vuvpzyv8"}
+

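The `reasoning_details` entries differ by upstream model: the Claude cassettes stream `reasoning.text` parts with a `signature` (empty until the final signed chunk), the Gemini recording above ends with a `reasoning.encrypted` part carrying opaque `data`, and the gpt-5 recording further below streams `reasoning.summary` parts with an `id`. A small Go sketch of decoding all three shapes into one struct follows; only the JSON keys and `format` values come from the recorded payloads, the struct and field names are hypothetical.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// reasoningDetail covers the three shapes seen in these cassettes:
// reasoning.text (text + signature), reasoning.encrypted (data),
// and reasoning.summary (summary + id).
type reasoningDetail struct {
	Type      string `json:"type"`
	Format    string `json:"format"` // anthropic-claude-v1, google-gemini-v1, openai-responses-v1
	Index     int    `json:"index"`
	Text      string `json:"text,omitempty"`      // reasoning.text
	Signature string `json:"signature,omitempty"` // reasoning.text (signed thinking)
	Data      string `json:"data,omitempty"`      // reasoning.encrypted
	Summary   string `json:"summary,omitempty"`   // reasoning.summary
	ID        string `json:"id,omitempty"`        // reasoning.summary
}

func main() {
	// Truncated samples shaped like the recorded reasoning_details entries.
	samples := []string{
		`{"type":"reasoning.text","text":"The user is asking for","signature":"","format":"anthropic-claude-v1","index":0}`,
		`{"type":"reasoning.encrypted","data":"CiQBjz1r...","format":"google-gemini-v1","index":0}`,
		`{"type":"reasoning.summary","summary":"**Getting","id":"rs_0430...","format":"openai-responses-v1","index":0}`,
	}
	for _, s := range samples {
		var d reasoningDetail
		if err := json.Unmarshal([]byte(s), &d); err != nil {
			panic(err)
		}
		fmt.Printf("%-20s %-22s text=%q data-len=%d summary=%q\n",
			d.Type, d.Format, d.Text, len(d.Data), d.Summary)
	}
}
```
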
providertests/testdata/TestVercelThinking/gemini-3-pro-preview/thinking.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 481
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence, Italy?","role":"user"}],"model":"google/gemini-3-pro-preview","tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"reasoning":{"enabled":true}}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/testdata/TestVercelThinking/gpt-5/thinking-streaming.yaml 🔗

@@ -0,0 +1,343 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 520
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence, Italy?","role":"user"}],"model":"openai/gpt-5","stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"reasoning":{"enabled":true},"stream":true}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    body: |+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":"**Getting","reasoning_details":[{"type":"reasoning.summary","summary":"**Getting","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" weather","reasoning_details":[{"type":"reasoning.summary","summary":" weather","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" information","reasoning_details":[{"type":"reasoning.summary","summary":" information","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":"**\n\nI","reasoning_details":[{"type":"reasoning.summary","summary":"**\n\nI","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" need","reasoning_details":[{"type":"reasoning.summary","summary":" need","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" to","reasoning_details":[{"type":"reasoning.summary","summary":" to","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" retrieve","reasoning_details":[{"type":"reasoning.summary","summary":" retrieve","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" the","reasoning_details":[{"type":"reasoning.summary","summary":" the","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" weather","reasoning_details":[{"type":"reasoning.summary","summary":" weather","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" using","reasoning_details":[{"type":"reasoning.summary","summary":" using","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" the","reasoning_details":[{"type":"reasoning.summary","summary":" the","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" designated","reasoning_details":[{"type":"reasoning.summary","summary":" designated","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" tool","reasoning_details":[{"type":"reasoning.summary","summary":" tool","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":",","reasoning_details":[{"type":"reasoning.summary","summary":",","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" specifically","reasoning_details":[{"type":"reasoning.summary","summary":" specifically","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" the","reasoning_details":[{"type":"reasoning.summary","summary":" the","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" functions","reasoning_details":[{"type":"reasoning.summary","summary":" functions","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":".weather","reasoning_details":[{"type":"reasoning.summary","summary":".weather","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" with","reasoning_details":[{"type":"reasoning.summary","summary":" with","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" the","reasoning_details":[{"type":"reasoning.summary","summary":" the","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" location","reasoning_details":[{"type":"reasoning.summary","summary":" location","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" parameter","reasoning_details":[{"type":"reasoning.summary","summary":" parameter","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":".","reasoning_details":[{"type":"reasoning.summary","summary":".","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" Even","reasoning_details":[{"type":"reasoning.summary","summary":" Even","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" though","reasoning_details":[{"type":"reasoning.summary","summary":" though","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" there's","reasoning_details":[{"type":"reasoning.summary","summary":" there's","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" a","reasoning_details":[{"type":"reasoning.summary","summary":" a","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" multi","reasoning_details":[{"type":"reasoning.summary","summary":" multi","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":"_tool","reasoning_details":[{"type":"reasoning.summary","summary":"_tool","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":"_use","reasoning_details":[{"type":"reasoning.summary","summary":"_use","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":".parallel","reasoning_details":[{"type":"reasoning.summary","summary":".parallel","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" option","reasoning_details":[{"type":"reasoning.summary","summary":" option","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" for","reasoning_details":[{"type":"reasoning.summary","summary":" for","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" running","reasoning_details":[{"type":"reasoning.summary","summary":" running","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" multiple","reasoning_details":[{"type":"reasoning.summary","summary":" multiple","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" tools","reasoning_details":[{"type":"reasoning.summary","summary":" tools","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":",","reasoning_details":[{"type":"reasoning.summary","summary":",","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" I'm","reasoning_details":[{"type":"reasoning.summary","summary":" I'm","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" focused","reasoning_details":[{"type":"reasoning.summary","summary":" focused","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" on","reasoning_details":[{"type":"reasoning.summary","summary":" on","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" just","reasoning_details":[{"type":"reasoning.summary","summary":" just","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" one","reasoning_details":[{"type":"reasoning.summary","summary":" one","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" tool","reasoning_details":[{"type":"reasoning.summary","summary":" tool","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" here","reasoning_details":[{"type":"reasoning.summary","summary":" here","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":",","reasoning_details":[{"type":"reasoning.summary","summary":",","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" so","reasoning_details":[{"type":"reasoning.summary","summary":" so","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" I'll","reasoning_details":[{"type":"reasoning.summary","summary":" I'll","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" go","reasoning_details":[{"type":"reasoning.summary","summary":" go","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" ahead","reasoning_details":[{"type":"reasoning.summary","summary":" ahead","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" and","reasoning_details":[{"type":"reasoning.summary","summary":" and","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" call","reasoning_details":[{"type":"reasoning.summary","summary":" call","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" functions","reasoning_details":[{"type":"reasoning.summary","summary":" functions","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":".weather","reasoning_details":[{"type":"reasoning.summary","summary":".weather","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" directly","reasoning_details":[{"type":"reasoning.summary","summary":" directly","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" with","reasoning_details":[{"type":"reasoning.summary","summary":" with","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" \"","reasoning_details":[{"type":"reasoning.summary","summary":" \"","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":"Flor","reasoning_details":[{"type":"reasoning.summary","summary":"Flor","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":"ence","reasoning_details":[{"type":"reasoning.summary","summary":"ence","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":",","reasoning_details":[{"type":"reasoning.summary","summary":",","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" Italy","reasoning_details":[{"type":"reasoning.summary","summary":" Italy","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":".\"","reasoning_details":[{"type":"reasoning.summary","summary":".\"","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" I'll","reasoning_details":[{"type":"reasoning.summary","summary":" I'll","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" make","reasoning_details":[{"type":"reasoning.summary","summary":" make","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" sure","reasoning_details":[{"type":"reasoning.summary","summary":" sure","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" to","reasoning_details":[{"type":"reasoning.summary","summary":" to","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" use","reasoning_details":[{"type":"reasoning.summary","summary":" use","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" the","reasoning_details":[{"type":"reasoning.summary","summary":" the","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" commentary","reasoning_details":[{"type":"reasoning.summary","summary":" commentary","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" channel","reasoning_details":[{"type":"reasoning.summary","summary":" channel","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" for","reasoning_details":[{"type":"reasoning.summary","summary":" for","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" this","reasoning_details":[{"type":"reasoning.summary","summary":" this","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" call","reasoning_details":[{"type":"reasoning.summary","summary":" call","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":".","reasoning_details":[{"type":"reasoning.summary","summary":".","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" Once","reasoning_details":[{"type":"reasoning.summary","summary":" Once","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" I","reasoning_details":[{"type":"reasoning.summary","summary":" I","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" receive","reasoning_details":[{"type":"reasoning.summary","summary":" receive","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" the","reasoning_details":[{"type":"reasoning.summary","summary":" the","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" weather","reasoning_details":[{"type":"reasoning.summary","summary":" weather","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" response","reasoning_details":[{"type":"reasoning.summary","summary":" response","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":",","reasoning_details":[{"type":"reasoning.summary","summary":",","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" I'll","reasoning_details":[{"type":"reasoning.summary","summary":" I'll","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" present","reasoning_details":[{"type":"reasoning.summary","summary":" present","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" the","reasoning_details":[{"type":"reasoning.summary","summary":" the","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" information","reasoning_details":[{"type":"reasoning.summary","summary":" information","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" clearly","reasoning_details":[{"type":"reasoning.summary","summary":" clearly","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" and","reasoning_details":[{"type":"reasoning.summary","summary":" and","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" succinct","reasoning_details":[{"type":"reasoning.summary","summary":" succinct","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":"ly","reasoning_details":[{"type":"reasoning.summary","summary":"ly","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":".","reasoning_details":[{"type":"reasoning.summary","summary":".","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" Let's","reasoning_details":[{"type":"reasoning.summary","summary":" Let's","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":" go","reasoning_details":[{"type":"reasoning.summary","summary":" go","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+
+      data: {"id":"gen_01KGF3BHK17PHK9V9DEN2A1ZAE","object":"chat.completion.chunk","created":1770033366,"model":"openai/gpt-5","choices":[{"index":0,"delta":{"reasoning":"!","reasoning_details":[{"type":"reasoning.summary","summary":"!","id":"rs_0430adf43712f48201698090d71990819682f523d5f4a4cd6a","format":"openai-responses-v1","index":0}]},"logprobs":null,"finish_reason":null}],"system_fingerprint":"fp_5gf37iohnb"}
+

providertests/testdata/TestVercelThinking/gpt-5/thinking.yaml 🔗

@@ -0,0 +1,63 @@
+---
+version: 2
+interactions:
+- id: 0
+  request:
+    proto: HTTP/1.1
+    proto_major: 1
+    proto_minor: 1
+    content_length: 466
+    host: ""
+    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence, Italy?","role":"user"}],"model":"openai/gpt-5","tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}],"reasoning":{"enabled":true}}'
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+      User-Agent:
+      - OpenAI/Go 2.7.1
+    url: https://ai-gateway.vercel.sh/v1/chat/completions
+    method: POST
+  response:
+    proto: HTTP/2.0
+    proto_major: 2
+    proto_minor: 0
+    content_length: -1
+    uncompressed: true

providertests/vercel_test.go 🔗

@@ -0,0 +1,124 @@
+package providertests
+
+import (
+	"net/http"
+	"os"
+	"testing"
+
+	"charm.land/fantasy"
+	"charm.land/fantasy/providers/anthropic"
+	"charm.land/fantasy/providers/vercel"
+	"charm.land/x/vcr"
+	"github.com/stretchr/testify/require"
+)
+
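+// vercelTestModels lists the models exercised against the Vercel AI Gateway:
+// display name, gateway model ID, and whether the model supports reasoning.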
+var vercelTestModels = []testModel{
+	{"claude-sonnet-4", "anthropic/claude-sonnet-4", true},
+	{"gemini-2.5-flash", "google/gemini-2.5-flash", false},
+	{"gpt-5", "openai/gpt-5", true},
+	{"gemini-3-pro-preview", "google/gemini-3-pro-preview", true},
+}
+
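+// TestVercelCommon runs the shared provider test suite against every model in vercelTestModels.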
+func TestVercelCommon(t *testing.T) {
+	var pairs []builderPair
+	for _, m := range vercelTestModels {
+		pairs = append(pairs, builderPair{m.name, vercelBuilder(m.model), nil, nil})
+	}
+	testCommon(t, pairs)
+}
+
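+// TestVercelCommonWithAnthropicCache reruns the shared suite for claude-sonnet-4 with the addAnthropicCaching adjustment applied.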
+func TestVercelCommonWithAnthropicCache(t *testing.T) {
+	testCommon(t, []builderPair{
+		{"claude-sonnet-4", vercelBuilder("anthropic/claude-sonnet-4"), nil, addAnthropicCaching},
+	})
+}
+
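+// TestVercelThinking enables reasoning via the Vercel provider options and runs the shared thinking suite against the reasoning-capable models.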
+func TestVercelThinking(t *testing.T) {
+	enabled := true
+	opts := fantasy.ProviderOptions{
+		vercel.Name: &vercel.ProviderOptions{
+			Reasoning: &vercel.ReasoningOptions{
+				Enabled: &enabled,
+			},
+		},
+	}
+
+	var pairs []builderPair
+	for _, m := range vercelTestModels {
+		if !m.reasoning {
+			continue
+		}
+		pairs = append(pairs, builderPair{m.name, vercelBuilder(m.model), opts, nil})
+	}
+	testThinking(t, pairs, testVercelThinking)
+
+	// also verify that the Anthropic thinking signature is passed through
+	testThinking(t, []builderPair{
+		{"claude-sonnet-4-sig", vercelBuilder("anthropic/claude-sonnet-4"), opts, nil},
+	}, testVercelThinkingWithSignature)
+}
+
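+// testVercelThinkingWithSignature checks that every non-empty reasoning part carries an Anthropic signature in its provider options.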
+func testVercelThinkingWithSignature(t *testing.T, result *fantasy.AgentResult) {
+	reasoningContentCount := 0
+	signaturesCount := 0
+	// Count reasoning parts and the Anthropic signatures attached to them; the totals must match.
+	for _, step := range result.Steps {
+		for _, msg := range step.Messages {
+			for _, content := range msg.Content {
+				if content.GetType() == fantasy.ContentTypeReasoning {
+					reasoningContentCount += 1
+					reasoningContent, ok := fantasy.AsContentType[fantasy.ReasoningPart](content)
+					if !ok {
+						continue
+					}
+					if len(reasoningContent.ProviderOptions) == 0 {
+						continue
+					}
+
+					anthropicReasoningMetadata, ok := reasoningContent.ProviderOptions[anthropic.Name]
+					if !ok {
+						continue
+					}
+					if reasoningContent.Text != "" {
+						if typed, ok := anthropicReasoningMetadata.(*anthropic.ReasoningOptionMetadata); ok {
+							require.NotEmpty(t, typed.Signature)
+							signaturesCount += 1
+						}
+					}
+				}
+			}
+		}
+	}
+	require.Greater(t, reasoningContentCount, 0)
+	require.Greater(t, signaturesCount, 0)
+	require.Equal(t, reasoningContentCount, signaturesCount)
+	// the Anthropic reasoning metadata is also attached, so run the Anthropic thinking checks as well
+	testAnthropicThinking(t, result)
+}
+
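+// testVercelThinking checks that the result contains at least one reasoning part.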
+func testVercelThinking(t *testing.T, result *fantasy.AgentResult) {
+	reasoningContentCount := 0
+	for _, step := range result.Steps {
+		for _, msg := range step.Messages {
+			for _, content := range msg.Content {
+				if content.GetType() == fantasy.ContentTypeReasoning {
+					reasoningContentCount += 1
+				}
+			}
+		}
+	}
+	require.Greater(t, reasoningContentCount, 0)
+}
+
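+// vercelBuilder returns a builderFunc that creates a Vercel language model for the given gateway model ID, recording HTTP traffic through the VCR transport.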
+func vercelBuilder(model string) builderFunc {
+	return func(t *testing.T, r *vcr.Recorder) (fantasy.LanguageModel, error) {
+		provider, err := vercel.New(
+			vercel.WithAPIKey(os.Getenv("FANTASY_VERCEL_API_KEY")),
+			vercel.WithHTTPClient(&http.Client{Transport: r}),
+		)
+		if err != nil {
+			return nil, err
+		}
+		return provider.LanguageModel(t.Context(), model)
+	}
+}