chore: various updates (#17)

Created by Kujtim Hoxha

* feat(tools): add provider options to agent tools

This is needed so that we can set caching params for
Anthropic/OpenRouter.

* feat(anthropic): allow adding cache control at the system message level

* fix(tests): add new options to tool

* chore: improve parse options

* fix(openai): finish reason mapping

* feat: allow prepare step to update the context

* fix: apparently not a good idea to change the tool ids

* feat: add headers to azure provider

* chore: add additional options to the google provider

Change summary

ai/agent.go                                                                                 |  11 
ai/agent_stream_test.go                                                                     |  12 
ai/agent_test.go                                                                            |  47 
ai/provider.go                                                                              |   1 
ai/tool.go                                                                                  |  19 
anthropic/anthropic.go                                                                      |  14 
anthropic/provider_options.go                                                               |   8 
azure/azure.go                                                                              |   6 
google/google.go                                                                            |  26 
google/provider_options.go                                                                  |  10 
openai/language_model.go                                                                    |  46 
openai/language_model_hooks.go                                                              |  11 
openai/openai.go                                                                            |   8 
openai/provider_options.go                                                                  |   8 
openaicompat/openaicompat.go                                                                |  12 
openaicompat/provider_options.go                                                            |   8 
openrouter/language_model_hooks.go                                                          |  23 
openrouter/openrouter.go                                                                    |  13 
openrouter/provider_options.go                                                              |   8 
providertests/openrouter_test.go                                                            | 104 
providertests/testdata/TestOpenRouterWithUniqueToolCallIDs/stream_unique_tool_call_ids.yaml | 131 
providertests/testdata/TestOpenRouterWithUniqueToolCallIDs/unique_tool_call_ids.yaml        |  55 
22 files changed, 150 insertions(+), 431 deletions(-)

Detailed changes

ai/agent.go 🔗

@@ -105,7 +105,7 @@ type ToolCallRepairOptions struct {
 }
 
 type (
-	PrepareStepFunction    = func(options PrepareStepFunctionOptions) (PrepareStepResult, error)
+	PrepareStepFunction    = func(ctx context.Context, options PrepareStepFunctionOptions) (context.Context, PrepareStepResult, error)
 	OnStepFinishedFunction = func(step StepResult)
 	RepairToolCallFunction = func(ctx context.Context, options ToolCallRepairOptions) (*ToolCallContent, error)
 )
@@ -357,7 +357,7 @@ func (a *agent) Generate(ctx context.Context, opts AgentCall) (*AgentResult, err
 		disableAllTools := false
 
 		if opts.PrepareStep != nil {
-			prepared, err := opts.PrepareStep(PrepareStepFunctionOptions{
+			updatedCtx, prepared, err := opts.PrepareStep(ctx, PrepareStepFunctionOptions{
 				Model:      stepModel,
 				Steps:      steps,
 				StepNumber: len(steps),
@@ -367,6 +367,8 @@ func (a *agent) Generate(ctx context.Context, opts AgentCall) (*AgentResult, err
 				return nil, err
 			}
 
+			ctx = updatedCtx
+
 			// Apply prepared step modifications
 			if prepared.Messages != nil {
 				stepInputMessages = prepared.Messages
@@ -774,7 +776,7 @@ func (a *agent) Stream(ctx context.Context, opts AgentStreamCall) (*AgentResult,
 
 		// Apply step preparation if provided
 		if call.PrepareStep != nil {
-			prepared, err := call.PrepareStep(PrepareStepFunctionOptions{
+			updatedCtx, prepared, err := call.PrepareStep(ctx, PrepareStepFunctionOptions{
 				Model:      stepModel,
 				Steps:      steps,
 				StepNumber: stepNumber,
@@ -784,6 +786,8 @@ func (a *agent) Stream(ctx context.Context, opts AgentStreamCall) (*AgentResult,
 				return nil, err
 			}
 
+			ctx = updatedCtx
+
 			if prepared.Messages != nil {
 				stepInputMessages = prepared.Messages
 			}
@@ -912,6 +916,7 @@ func (a *agent) prepareTools(tools []AgentTool, activeTools []string, disableAll
 				"properties": info.Parameters,
 				"required":   info.Required,
 			},
+			ProviderOptions: tool.ProviderOptions(),
 		})
 	}
 	return preparedTools

ai/agent_stream_test.go 🔗

@@ -10,7 +10,17 @@ import (
 )
 
 // EchoTool is a simple tool that echoes back the input message
-type EchoTool struct{}
+type EchoTool struct {
+	providerOptions ProviderOptions
+}
+
+func (e *EchoTool) SetProviderOptions(opts ProviderOptions) {
+	e.providerOptions = opts
+}
+
+func (e *EchoTool) ProviderOptions() ProviderOptions {
+	return e.providerOptions
+}
 
 // Info returns the tool information
 func (e *EchoTool) Info() ToolInfo {

ai/agent_test.go 🔗

@@ -12,11 +12,20 @@ import (
 
 // Mock tool for testing
 type mockTool struct {
-	name        string
-	description string
-	parameters  map[string]any
-	required    []string
-	executeFunc func(ctx context.Context, call ToolCall) (ToolResponse, error)
+	name            string
+	providerOptions ProviderOptions
+	description     string
+	parameters      map[string]any
+	required        []string
+	executeFunc     func(ctx context.Context, call ToolCall) (ToolResponse, error)
+}
+
+func (m *mockTool) SetProviderOptions(opts ProviderOptions) {
+	m.providerOptions = opts
+}
+
+func (m *mockTool) ProviderOptions() ProviderOptions {
+	return m.providerOptions
 }
 
 func (m *mockTool) Info() ToolInfo {
@@ -923,9 +932,9 @@ func TestPrepareStep(t *testing.T) {
 			},
 		}
 
-		prepareStepFunc := func(options PrepareStepFunctionOptions) (PrepareStepResult, error) {
+		prepareStepFunc := func(ctx context.Context, options PrepareStepFunctionOptions) (context.Context, PrepareStepResult, error) {
 			newSystem := "Modified system prompt for step " + fmt.Sprintf("%d", options.StepNumber)
-			return PrepareStepResult{
+			return ctx, PrepareStepResult{
 				Model:    options.Model,
 				Messages: options.Messages,
 				System:   &newSystem,
@@ -960,9 +969,9 @@ func TestPrepareStep(t *testing.T) {
 			},
 		}
 
-		prepareStepFunc := func(options PrepareStepFunctionOptions) (PrepareStepResult, error) {
+		prepareStepFunc := func(ctx context.Context, options PrepareStepFunctionOptions) (context.Context, PrepareStepResult, error) {
 			toolChoice := ToolChoiceNone
-			return PrepareStepResult{
+			return ctx, PrepareStepResult{
 				Model:      options.Model,
 				Messages:   options.Messages,
 				ToolChoice: &toolChoice,
@@ -1005,9 +1014,9 @@ func TestPrepareStep(t *testing.T) {
 		tool2 := &mockTool{name: "tool2", description: "Tool 2"}
 		tool3 := &mockTool{name: "tool3", description: "Tool 3"}
 
-		prepareStepFunc := func(options PrepareStepFunctionOptions) (PrepareStepResult, error) {
+		prepareStepFunc := func(ctx context.Context, options PrepareStepFunctionOptions) (context.Context, PrepareStepResult, error) {
 			activeTools := []string{"tool2"} // Only tool2 should be active
-			return PrepareStepResult{
+			return ctx, PrepareStepResult{
 				Model:       options.Model,
 				Messages:    options.Messages,
 				ActiveTools: activeTools,
@@ -1045,8 +1054,8 @@ func TestPrepareStep(t *testing.T) {
 
 		tool1 := &mockTool{name: "tool1", description: "Tool 1"}
 
-		prepareStepFunc := func(options PrepareStepFunctionOptions) (PrepareStepResult, error) {
-			return PrepareStepResult{
+		prepareStepFunc := func(ctx context.Context, options PrepareStepFunctionOptions) (context.Context, PrepareStepResult, error) {
+			return ctx, PrepareStepResult{
 				Model:           options.Model,
 				Messages:        options.Messages,
 				DisableAllTools: true, // Disable all tools for this step
@@ -1100,11 +1109,11 @@ func TestPrepareStep(t *testing.T) {
 		tool1 := &mockTool{name: "tool1", description: "Tool 1"}
 		tool2 := &mockTool{name: "tool2", description: "Tool 2"}
 
-		prepareStepFunc := func(options PrepareStepFunctionOptions) (PrepareStepResult, error) {
+		prepareStepFunc := func(ctx context.Context, options PrepareStepFunctionOptions) (context.Context, PrepareStepResult, error) {
 			newSystem := "Step-specific system"
 			toolChoice := SpecificToolChoice("tool1")
 			activeTools := []string{"tool1"}
-			return PrepareStepResult{
+			return ctx, PrepareStepResult{
 				Model:       options.Model,
 				Messages:    options.Messages,
 				System:      &newSystem,
@@ -1163,9 +1172,9 @@ func TestPrepareStep(t *testing.T) {
 
 		tool1 := &mockTool{name: "tool1", description: "Tool 1"}
 
-		prepareStepFunc := func(options PrepareStepFunctionOptions) (PrepareStepResult, error) {
+		prepareStepFunc := func(ctx context.Context, options PrepareStepFunctionOptions) (context.Context, PrepareStepResult, error) {
 			// All optional fields are nil, should use parent values
-			return PrepareStepResult{
+			return ctx, PrepareStepResult{
 				Model:       options.Model,
 				Messages:    options.Messages,
 				System:      nil, // Use parent
@@ -1212,8 +1221,8 @@ func TestPrepareStep(t *testing.T) {
 		tool1 := &mockTool{name: "tool1", description: "Tool 1"}
 		tool2 := &mockTool{name: "tool2", description: "Tool 2"}
 
-		prepareStepFunc := func(options PrepareStepFunctionOptions) (PrepareStepResult, error) {
-			return PrepareStepResult{
+		prepareStepFunc := func(ctx context.Context, options PrepareStepFunctionOptions) (context.Context, PrepareStepResult, error) {
+			return ctx, PrepareStepResult{
 				Model:       options.Model,
 				Messages:    options.Messages,
 				ActiveTools: []string{}, // Empty slice means all tools

ai/provider.go 🔗

@@ -3,5 +3,4 @@ package ai
 type Provider interface {
 	Name() string
 	LanguageModel(modelID string) (LanguageModel, error)
-	ParseOptions(data map[string]any) (ProviderOptionsData, error)
 }

ai/tool.go 🔗

@@ -81,6 +81,8 @@ func WithResponseMetadata(response ToolResponse, metadata any) ToolResponse {
 type AgentTool interface {
 	Info() ToolInfo
 	Run(ctx context.Context, params ToolCall) (ToolResponse, error)
+	ProviderOptions() ProviderOptions
+	SetProviderOptions(opts ProviderOptions)
 }
 
 // NewAgentTool creates a typed tool from a function with automatic schema generation.
@@ -103,10 +105,19 @@ func NewAgentTool[TInput any](
 
 // funcToolWrapper wraps a function to implement the AgentTool interface.
 type funcToolWrapper[TInput any] struct {
-	name        string
-	description string
-	fn          func(ctx context.Context, input TInput, call ToolCall) (ToolResponse, error)
-	schema      Schema
+	name            string
+	description     string
+	fn              func(ctx context.Context, input TInput, call ToolCall) (ToolResponse, error)
+	schema          Schema
+	providerOptions ProviderOptions
+}
+
+func (w *funcToolWrapper[TInput]) SetProviderOptions(opts ProviderOptions) {
+	w.providerOptions = opts
+}
+
+func (w *funcToolWrapper[TInput]) ProviderOptions() ProviderOptions {
+	return w.providerOptions
 }
 
 func (w *funcToolWrapper[TInput]) Info() ToolInfo {

anthropic/anthropic.go 🔗

@@ -260,14 +260,6 @@ func (a languageModel) prepareParams(call ai.Call) (*anthropic.MessageNewParams,
 	return params, warnings, nil
 }
 
-func (a *provider) ParseOptions(data map[string]any) (ai.ProviderOptionsData, error) {
-	var options ProviderOptions
-	if err := ai.ParseOptions(data, &options); err != nil {
-		return nil, err
-	}
-	return &options, nil
-}
-
 func (a *provider) Name() string {
 	return Name
 }
@@ -441,8 +433,12 @@ func toPrompt(prompt ai.Prompt, sendReasoningData bool) ([]anthropic.TextBlockPa
 			}
 			finishedSystemBlock = true
 			for _, msg := range block.Messages {
-				for _, part := range msg.Content {
+				for i, part := range msg.Content {
+					isLastPart := i == len(msg.Content)-1
 					cacheControl := getCacheControl(part.Options())
+					if cacheControl == nil && isLastPart {
+						cacheControl = getCacheControl(msg.ProviderOptions)
+					}
 					text, ok := ai.AsMessagePart[ai.TextPart](part)
 					if !ok {
 						continue

anthropic/provider_options.go 🔗

@@ -42,3 +42,11 @@ func NewProviderCacheControlOptions(opts *ProviderCacheControlOptions) ai.Provid
 		Name: opts,
 	}
 }
+
+func ParseOptions(data map[string]any) (*ProviderOptions, error) {
+	var options ProviderOptions
+	if err := ai.ParseOptions(data, &options); err != nil {
+		return nil, err
+	}
+	return &options, nil
+}

azure/azure.go 🔗

@@ -53,6 +53,12 @@ func WithAPIKey(apiKey string) Option {
 	}
 }
 
+func WithHeaders(headers map[string]string) Option {
+	return func(o *options) {
+		o.openaiOptions = append(o.openaiOptions, openaicompat.WithHeaders(headers))
+	}
+}
+
 func WithAPIVersion(version string) Option {
 	return func(o *options) {
 		o.apiVersion = version

google/google.go 🔗

@@ -28,6 +28,7 @@ type provider struct {
 type options struct {
 	apiKey   string
 	name     string
+	baseURL  string
 	headers  map[string]string
 	client   *http.Client
 	backend  genai.Backend
@@ -53,6 +54,12 @@ func New(opts ...Option) ai.Provider {
 	}
 }
 
+func WithBaseURL(baseURL string) Option {
+	return func(o *options) {
+		o.baseURL = baseURL
+	}
+}
+
 func WithGeminiAPIKey(apiKey string) Option {
 	return func(o *options) {
 		o.backend = genai.BackendGeminiAPI
@@ -102,14 +109,6 @@ func (*provider) Name() string {
 	return Name
 }
 
-func (a *provider) ParseOptions(data map[string]any) (ai.ProviderOptionsData, error) {
-	var options ProviderOptions
-	if err := ai.ParseOptions(data, &options); err != nil {
-		return nil, err
-	}
-	return &options, nil
-}
-
 type languageModel struct {
 	provider        string
 	modelID         string
@@ -137,6 +136,17 @@ func (g *provider) LanguageModel(modelID string) (ai.LanguageModel, error) {
 	if g.options.skipAuth {
 		cc.Credentials = &auth.Credentials{TokenProvider: dummyTokenProvider{}}
 	}
+
+	if g.options.baseURL != "" || len(g.options.headers) > 0 {
+		headers := http.Header{}
+		for k, v := range g.options.headers {
+			headers.Add(k, v)
+		}
+		cc.HTTPOptions = genai.HTTPOptions{
+			BaseURL: g.options.baseURL,
+			Headers: headers,
+		}
+	}
 	client, err := genai.NewClient(context.Background(), cc)
 	if err != nil {
 		return nil, err

google/provider_options.go 🔗

@@ -1,5 +1,7 @@
 package google
 
+import "github.com/charmbracelet/fantasy/ai"
+
 type ThinkingConfig struct {
 	ThinkingBudget  *int64 `json:"thinking_budget"`
 	IncludeThoughts *bool  `json:"include_thoughts"`
@@ -42,3 +44,11 @@ type ProviderOptions struct {
 }
 
 func (o *ProviderOptions) Options() {}
+
+func ParseOptions(data map[string]any) (*ProviderOptions, error) {
+	var options ProviderOptions
+	if err := ai.ParseOptions(data, &options); err != nil {
+		return nil, err
+	}
+	return &options, nil
+}

openai/language_model.go 🔗

@@ -21,8 +21,6 @@ type languageModel struct {
 	provider                   string
 	modelID                    string
 	client                     openai.Client
-	uniqueToolCallIds          bool
-	generateIDFunc             LanguageModelGenerateIDFunc
 	prepareCallFunc            LanguageModelPrepareCallFunc
 	mapFinishReasonFunc        LanguageModelMapFinishReasonFunc
 	extraContentFunc           LanguageModelExtraContentFunc
@@ -70,24 +68,11 @@ func WithLanguageModelStreamUsageFunc(fn LanguageModelStreamUsageFunc) LanguageM
 	}
 }
 
-func WithLanguageUniqueToolCallIds() LanguageModelOption {
-	return func(l *languageModel) {
-		l.uniqueToolCallIds = true
-	}
-}
-
-func WithLanguageModelGenerateIDFunc(fn LanguageModelGenerateIDFunc) LanguageModelOption {
-	return func(l *languageModel) {
-		l.generateIDFunc = fn
-	}
-}
-
 func newLanguageModel(modelID string, provider string, client openai.Client, opts ...LanguageModelOption) languageModel {
 	model := languageModel{
 		modelID:                    modelID,
 		provider:                   provider,
 		client:                     client,
-		generateIDFunc:             DefaultGenerateID,
 		prepareCallFunc:            DefaultPrepareCallFunc,
 		mapFinishReasonFunc:        DefaultMapFinishReasonFunc,
 		usageFunc:                  DefaultUsageFunc,
@@ -276,9 +261,6 @@ func (o languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response
 	}
 	for _, tc := range choice.Message.ToolCalls {
 		toolCallID := tc.ID
-		if toolCallID == "" || o.uniqueToolCallIds {
-			toolCallID = o.generateIDFunc()
-		}
 		content = append(content, ai.ToolCallContent{
 			ProviderExecuted: false, // TODO: update when handling other tools
 			ToolCallID:       toolCallID,
@@ -300,10 +282,14 @@ func (o languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response
 
 	usage, providerMetadata := o.usageFunc(*response)
 
+	mappedFinishReason := o.mapFinishReasonFunc(choice.FinishReason)
+	if len(choice.Message.ToolCalls) > 0 {
+		mappedFinishReason = ai.FinishReasonToolCalls
+	}
 	return &ai.Response{
 		Content:      content,
 		Usage:        usage,
-		FinishReason: DefaultMapFinishReasonFunc(choice),
+		FinishReason: mappedFinishReason,
 		ProviderMetadata: ai.ProviderMetadata{
 			Name: providerMetadata,
 		},
@@ -333,6 +319,7 @@ func (o languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
 	acc := openai.ChatCompletionAccumulator{}
 	extraContext := make(map[string]any)
 	var usage ai.Usage
+	var finishReason string
 	return func(yield func(ai.StreamPart) bool) {
 		if len(warnings) > 0 {
 			if !yield(ai.StreamPart{
@@ -350,6 +337,9 @@ func (o languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
 				continue
 			}
 			for _, choice := range chunk.Choices {
+				if choice.FinishReason != "" {
+					finishReason = choice.FinishReason
+				}
 				switch {
 				case choice.Delta.Content != "":
 					if !isActiveText {
@@ -434,15 +424,6 @@ func (o languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
 								return
 							}
 
-							// some providers do not send this as a unique id
-							// for some usecases in crush we need this ID to be unique.
-							// it won't change the behavior on the provider side because the
-							// provider only cares about the tool call id matching the result
-							// and in our case that will still be the case
-							if o.uniqueToolCallIds {
-								toolCallDelta.ID = o.generateIDFunc()
-							}
-
 							if !yield(ai.StreamPart{
 								Type:         ai.StreamPartTypeToolInputStart,
 								ID:           toolCallDelta.ID,
@@ -551,14 +532,17 @@ func (o languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamRespo
 					}
 				}
 			}
-			finishReason := ai.FinishReasonUnknown
+			mappedFinishReason := o.mapFinishReasonFunc(finishReason)
 			if len(acc.Choices) > 0 {
-				finishReason = o.mapFinishReasonFunc(acc.Choices[0])
+				choice := acc.Choices[0]
+				if len(choice.Message.ToolCalls) > 0 {
+					mappedFinishReason = ai.FinishReasonToolCalls
+				}
 			}
 			yield(ai.StreamPart{
 				Type:             ai.StreamPartTypeFinish,
 				Usage:            usage,
-				FinishReason:     finishReason,
+				FinishReason:     mappedFinishReason,
 				ProviderMetadata: providerMetadata,
 			})
 			return

openai/language_model_hooks.go 🔗

@@ -4,16 +4,14 @@ import (
 	"fmt"
 
 	"github.com/charmbracelet/fantasy/ai"
-	"github.com/google/uuid"
 	"github.com/openai/openai-go/v2"
 	"github.com/openai/openai-go/v2/packages/param"
 	"github.com/openai/openai-go/v2/shared"
 )
 
 type (
-	LanguageModelGenerateIDFunc             = func() string
 	LanguageModelPrepareCallFunc            = func(model ai.LanguageModel, params *openai.ChatCompletionNewParams, call ai.Call) ([]ai.CallWarning, error)
-	LanguageModelMapFinishReasonFunc        = func(choice openai.ChatCompletionChoice) ai.FinishReason
+	LanguageModelMapFinishReasonFunc        = func(finishReason string) ai.FinishReason
 	LanguageModelUsageFunc                  = func(choice openai.ChatCompletion) (ai.Usage, ai.ProviderOptionsData)
 	LanguageModelExtraContentFunc           = func(choice openai.ChatCompletionChoice) []ai.Content
 	LanguageModelStreamExtraFunc            = func(chunk openai.ChatCompletionChunk, yield func(ai.StreamPart) bool, ctx map[string]any) (map[string]any, bool)
@@ -21,10 +19,6 @@ type (
 	LanguageModelStreamProviderMetadataFunc = func(choice openai.ChatCompletionChoice, metadata ai.ProviderMetadata) ai.ProviderMetadata
 )
 
-func DefaultGenerateID() string {
-	return uuid.NewString()
-}
-
 func DefaultPrepareCallFunc(model ai.LanguageModel, params *openai.ChatCompletionNewParams, call ai.Call) ([]ai.CallWarning, error) {
 	if call.ProviderOptions == nil {
 		return nil, nil
@@ -162,8 +156,7 @@ func DefaultPrepareCallFunc(model ai.LanguageModel, params *openai.ChatCompletio
 	return warnings, nil
 }
 
-func DefaultMapFinishReasonFunc(choice openai.ChatCompletionChoice) ai.FinishReason {
-	finishReason := choice.FinishReason
+func DefaultMapFinishReasonFunc(finishReason string) ai.FinishReason {
 	switch finishReason {
 	case "stop":
 		return ai.FinishReasonStop

openai/openai.go 🔗

@@ -137,14 +137,6 @@ func (o *provider) LanguageModel(modelID string) (ai.LanguageModel, error) {
 	), nil
 }
 
-func (o *provider) ParseOptions(data map[string]any) (ai.ProviderOptionsData, error) {
-	var options ProviderOptions
-	if err := ai.ParseOptions(data, &options); err != nil {
-		return nil, err
-	}
-	return &options, nil
-}
-
 func (o *provider) Name() string {
 	return Name
 }

openai/provider_options.go 🔗

@@ -63,3 +63,11 @@ func NewProviderFileOptions(opts *ProviderFileOptions) ai.ProviderOptions {
 		Name: opts,
 	}
 }
+
+func ParseOptions(data map[string]any) (*ProviderOptions, error) {
+	var options ProviderOptions
+	if err := ai.ParseOptions(data, &options); err != nil {
+		return nil, err
+	}
+	return &options, nil
+}

openaicompat/openaicompat.go 🔗

@@ -76,15 +76,3 @@ func WithSDKOptions(opts ...option.RequestOption) Option {
 		o.sdkOptions = append(o.sdkOptions, opts...)
 	}
 }
-
-func WithLanguageUniqueToolCallIds() Option {
-	return func(l *options) {
-		l.languageModelOptions = append(l.languageModelOptions, openai.WithLanguageUniqueToolCallIds())
-	}
-}
-
-func WithLanguageModelGenerateIDFunc(fn openai.LanguageModelGenerateIDFunc) Option {
-	return func(l *options) {
-		l.languageModelOptions = append(l.languageModelOptions, openai.WithLanguageModelGenerateIDFunc(fn))
-	}
-}

openaicompat/provider_options.go 🔗

@@ -21,3 +21,11 @@ func NewProviderOptions(opts *ProviderOptions) ai.ProviderOptions {
 		Name: opts,
 	}
 }
+
+func ParseOptions(data map[string]any) (*ProviderOptions, error) {
+	var options ProviderOptions
+	if err := ai.ParseOptions(data, &options); err != nil {
+		return nil, err
+	}
+	return &options, nil
+}

openrouter/language_model_hooks.go 🔗

@@ -67,29 +67,6 @@ func languagePrepareModelCall(model ai.LanguageModel, params *openaisdk.ChatComp
 	return nil, nil
 }
 
-func languageModelMapFinishReason(choice openaisdk.ChatCompletionChoice) ai.FinishReason {
-	finishReason := choice.FinishReason
-	switch finishReason {
-	case "stop":
-		return ai.FinishReasonStop
-	case "length":
-		return ai.FinishReasonLength
-	case "content_filter":
-		return ai.FinishReasonContentFilter
-	case "function_call", "tool_calls":
-		return ai.FinishReasonToolCalls
-	default:
-		// for streaming responses the openai accumulator is not working as expected with some provider
-		// therefore it is sending no finish reason so we need to manually handle it
-		if len(choice.Message.ToolCalls) > 0 {
-			return ai.FinishReasonToolCalls
-		} else if finishReason == "" {
-			return ai.FinishReasonStop
-		}
-		return ai.FinishReasonUnknown
-	}
-}
-
 func languageModelExtraContent(choice openaisdk.ChatCompletionChoice) []ai.Content {
 	var content []ai.Content
 	reasoningData := ReasoningData{}

openrouter/openrouter.go 🔗

@@ -32,7 +32,6 @@ func New(opts ...Option) ai.Provider {
 			openai.WithLanguageModelStreamUsageFunc(languageModelStreamUsage),
 			openai.WithLanguageModelStreamExtraFunc(languageModelStreamExtra),
 			openai.WithLanguageModelExtraContentFunc(languageModelExtraContent),
-			openai.WithLanguageModelMapFinishReasonFunc(languageModelMapFinishReason),
 		},
 	}
 	for _, o := range opts {
@@ -67,18 +66,6 @@ func WithHTTPClient(client option.HTTPClient) Option {
 	}
 }
 
-func WithLanguageUniqueToolCallIds() Option {
-	return func(l *options) {
-		l.languageModelOptions = append(l.languageModelOptions, openai.WithLanguageUniqueToolCallIds())
-	}
-}
-
-func WithLanguageModelGenerateIDFunc(fn openai.LanguageModelGenerateIDFunc) Option {
-	return func(l *options) {
-		l.languageModelOptions = append(l.languageModelOptions, openai.WithLanguageModelGenerateIDFunc(fn))
-	}
-}
-
 func structToMapJSON(s any) (map[string]any, error) {
 	var result map[string]any
 	jsonBytes, err := json.Marshal(s)

openrouter/provider_options.go 🔗

@@ -121,3 +121,11 @@ func NewProviderOptions(opts *ProviderOptions) ai.ProviderOptions {
 		Name: opts,
 	}
 }
+
+func ParseOptions(data map[string]any) (*ProviderOptions, error) {
+	var options ProviderOptions
+	if err := ai.ParseOptions(data, &options); err != nil {
+		return nil, err
+	}
+	return &options, nil
+}

providertests/openrouter_test.go 🔗

@@ -1,12 +1,8 @@
 package providertests
 
 import (
-	"context"
-	"fmt"
 	"net/http"
 	"os"
-	"strconv"
-	"strings"
 	"testing"
 
 	"github.com/charmbracelet/fantasy/ai"
@@ -110,106 +106,6 @@ func testOpenrouterThinking(t *testing.T, result *ai.AgentResult) {
 	require.Greater(t, reasoningContentCount, 0)
 }
 
-func TestOpenRouterWithUniqueToolCallIDs(t *testing.T) {
-	type CalculatorInput struct {
-		A int `json:"a" description:"first number"`
-		B int `json:"b" description:"second number"`
-	}
-
-	addTool := ai.NewAgentTool(
-		"add",
-		"Add two numbers",
-		func(ctx context.Context, input CalculatorInput, _ ai.ToolCall) (ai.ToolResponse, error) {
-			result := input.A + input.B
-			return ai.NewTextResponse(strings.TrimSpace(strconv.Itoa(result))), nil
-		},
-	)
-	multiplyTool := ai.NewAgentTool(
-		"multiply",
-		"Multiply two numbers",
-		func(ctx context.Context, input CalculatorInput, _ ai.ToolCall) (ai.ToolResponse, error) {
-			result := input.A * input.B
-			return ai.NewTextResponse(strings.TrimSpace(strconv.Itoa(result))), nil
-		},
-	)
-	checkResult := func(t *testing.T, result *ai.AgentResult) {
-		require.Len(t, result.Steps, 2)
-
-		var toolCalls []ai.ToolCallContent
-		for _, content := range result.Steps[0].Content {
-			if content.GetType() == ai.ContentTypeToolCall {
-				toolCalls = append(toolCalls, content.(ai.ToolCallContent))
-			}
-		}
-		for _, tc := range toolCalls {
-			require.False(t, tc.Invalid)
-			require.Contains(t, tc.ToolCallID, "test-")
-		}
-		require.Len(t, toolCalls, 2)
-
-		finalText := result.Response.Content.Text()
-		require.Contains(t, finalText, "5", "expected response to contain '5', got: %q", finalText)
-		require.Contains(t, finalText, "6", "expected response to contain '6', got: %q", finalText)
-	}
-
-	id := 0
-	generateIDFunc := func() string {
-		id += 1
-		return fmt.Sprintf("test-%d", id)
-	}
-
-	t.Run("unique tool call ids", func(t *testing.T) {
-		r := newRecorder(t)
-
-		provider := openrouter.New(
-			openrouter.WithAPIKey(os.Getenv("FANTASY_OPENROUTER_API_KEY")),
-			openrouter.WithHTTPClient(&http.Client{Transport: r}),
-			openrouter.WithLanguageUniqueToolCallIds(),
-			openrouter.WithLanguageModelGenerateIDFunc(generateIDFunc),
-		)
-		languageModel, err := provider.LanguageModel("moonshotai/kimi-k2-0905")
-		require.NoError(t, err, "failed to build language model")
-
-		agent := ai.NewAgent(
-			languageModel,
-			ai.WithSystemPrompt("You are a helpful assistant. CRITICAL: Always use both add and multiply at the same time ALWAYS."),
-			ai.WithTools(addTool),
-			ai.WithTools(multiplyTool),
-		)
-		result, err := agent.Generate(t.Context(), ai.AgentCall{
-			Prompt:          "Add and multiply the number 2 and 3",
-			MaxOutputTokens: ai.IntOption(4000),
-		})
-		require.NoError(t, err, "failed to generate")
-		checkResult(t, result)
-	})
-	t.Run("stream unique tool call ids", func(t *testing.T) {
-		r := newRecorder(t)
-
-		provider := openrouter.New(
-			openrouter.WithAPIKey(os.Getenv("FANTASY_OPENROUTER_API_KEY")),
-			openrouter.WithHTTPClient(&http.Client{Transport: r}),
-			openrouter.WithLanguageUniqueToolCallIds(),
-			openrouter.WithLanguageModelGenerateIDFunc(generateIDFunc),
-		)
-		languageModel, err := provider.LanguageModel("moonshotai/kimi-k2-0905")
-		require.NoError(t, err, "failed to build language model")
-
-		agent := ai.NewAgent(
-			languageModel,
-			ai.WithSystemPrompt("You are a helpful assistant. Always use both add and multiply at the same time."),
-			ai.WithTools(addTool),
-			ai.WithTools(multiplyTool),
-		)
-		result, err := agent.Stream(t.Context(), ai.AgentStreamCall{
-			Prompt:          "Add and multiply the number 2 and 3",
-			MaxOutputTokens: ai.IntOption(4000),
-		})
-		require.NoError(t, err, "failed to generate")
-		checkResult(t, result)
-	})
-}
-
 func openrouterBuilder(model string) builderFunc {
 	return func(r *recorder.Recorder) (ai.LanguageModel, error) {
 		provider := openrouter.New(

providertests/testdata/TestOpenRouterWithUniqueToolCallIDs/stream_unique_tool_call_ids.yaml 🔗

@@ -1,223 +0,0 @@
----
-version: 2
-interactions:
-- id: 0
-  request:
-    proto: HTTP/1.1
-    proto_major: 1
-    proto_minor: 1
-    content_length: 890
-    host: ""
-    body: '{"messages":[{"content":"You are a helpful assistant. Always use both add and multiply at the same time.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"moonshotai/kimi-k2-0905","max_tokens":4000,"stream_options":{"include_usage":true},"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}],"usage":{"include":true},"stream":true}'
-    headers:
-      Accept:
-      - application/json
-      Content-Type:
-      - application/json
-      User-Agent:
-      - OpenAI/Go 2.3.0
-    url: https://openrouter.ai/api/v1/chat/completions
-    method: POST
-  response:
-    proto: HTTP/2.0
-    proto_major: 2
-    proto_minor: 0
-    content_length: -1
-    body: |+
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":"I'll"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" perform"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" both"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" addition"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" and"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" multiplication"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" of"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":"2"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" and"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":"3"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" for"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":" you"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"add:0","type":"function","function":{"name":"add"}}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":"{\""},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":"a"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":"\":"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":" "},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":"2"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":","},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":" \""},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":"b"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":"\":"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":" "},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":"3"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"function":{"arguments":"}"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"id":"multiply:1","type":"function","function":{"name":"multiply"}}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":"{\""},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":"a"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":"\":"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":" "},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":"2"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":","},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":" \""},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":"b"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":"\":"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":" "},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":"3"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":1,"function":{"arguments":"}"},"type":"function"}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":"tool_calls","native_finish_reason":"tool_calls","logprobs":null}],"system_fingerprint":"fpv0_ef28f882"}
-
-      data: {"id":"gen-1758885103-JQ2y9RAscNfolyFYU5K7","provider":"Novita","model":"moonshotai/kimi-k2-0905","object":"chat.completion.chunk","created":1758885103,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"usage":{"prompt_tokens":154,"completion_tokens":56,"total_tokens":210,"cost":0.0002324,"is_byok":false,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"cost_details":{"upstream_inference_cost":null,"upstream_inference_prompt_cost":0.0000924,"upstream_inference_completions_cost":0.00014},"completion_tokens_details":{"reasoning_tokens":0,"image_tokens":0}}}
-
-      data: [DONE]
-
-    headers:
-      Content-Type:
-      - text/event-stream
-    status: 200 OK
-    code: 200
-    duration: 1.595751792s
-- id: 1
-  request:
-    proto: HTTP/1.1
-    proto_major: 1
-    proto_minor: 1
-    content_length: 1311
-    host: ""

providertests/testdata/TestOpenRouterWithUniqueToolCallIDs/unique_tool_call_ids.yaml 🔗

@@ -1,92 +0,0 @@
----
-version: 2
-interactions:
-- id: 0
-  request:
-    proto: HTTP/1.1
-    proto_major: 1
-    proto_minor: 1
-    content_length: 853
-    host: ""
-    body: '{"messages":[{"content":"You are a helpful assistant. CRITICAL: Always use both add and multiply at the same time ALWAYS.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"moonshotai/kimi-k2-0905","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}],"usage":{"include":true}}'
-    headers:
-      Accept:
-      - application/json
-      Content-Type:
-      - application/json
-      User-Agent:
-      - OpenAI/Go 2.3.0
-    url: https://openrouter.ai/api/v1/chat/completions
-    method: POST
-  response:
-    proto: HTTP/2.0
-    proto_major: 2
-    proto_minor: 0
-    content_length: -1
-    body: '{"error":{"message":"Provider returned error","code":429,"metadata":{"raw":"moonshotai/kimi-k2-0905 is temporarily rate-limited upstream. Please retry shortly, or add your own key to accumulate your rate limits: https://openrouter.ai/settings/integrations","provider_name":"GMICloud"}},"user_id":"user_2zMGmKqlf4zmAvL9snVImB1Z1ZQ"}'
-    headers:
-      Content-Type:
-      - application/json
-    status: 429 Too Many Requests
-    code: 429
-    duration: 919.648708ms
-- id: 1
-  request:
-    proto: HTTP/1.1
-    proto_major: 1
-    proto_minor: 1
-    content_length: 853
-    host: ""
-    body: '{"messages":[{"content":"You are a helpful assistant. CRITICAL: Always use both add and multiply at the same time ALWAYS.","role":"system"},{"content":"Add and multiply the number 2 and 3","role":"user"}],"model":"moonshotai/kimi-k2-0905","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"add","strict":false,"description":"Add two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"},{"function":{"name":"multiply","strict":false,"description":"Multiply two numbers","parameters":{"properties":{"a":{"description":"first number","type":"integer"},"b":{"description":"second number","type":"integer"}},"required":["a","b"],"type":"object"}},"type":"function"}],"usage":{"include":true}}'
-    headers:
-      Accept:
-      - application/json
-      Content-Type:
-      - application/json
-      User-Agent:
-      - OpenAI/Go 2.3.0
-    url: https://openrouter.ai/api/v1/chat/completions
-    method: POST
-  response:
-    proto: HTTP/2.0
-    proto_major: 2
-    proto_minor: 0
-    content_length: -1
-    uncompressed: true