responses_language_model.go

   1package openai
   2
   3import (
   4	"context"
   5	"encoding/base64"
   6	"encoding/json"
   7	"errors"
   8	"fmt"
   9	"reflect"
  10	"strings"
  11
  12	"charm.land/fantasy"
  13	"charm.land/fantasy/object"
  14	"charm.land/fantasy/schema"
  15	"github.com/google/uuid"
  16	"github.com/openai/openai-go/v3"
  17	"github.com/openai/openai-go/v3/packages/param"
  18	"github.com/openai/openai-go/v3/responses"
  19	"github.com/openai/openai-go/v3/shared"
  20)
  21
// topLogprobsMax is the largest top_logprobs value the Responses API accepts;
// it is used when the caller requests logprobs as a bare boolean true.
const topLogprobsMax = 20
  23
// responsesLanguageModel is a fantasy language model backed by the OpenAI
// Responses API.
type responsesLanguageModel struct {
	provider           string             // provider name reported to callers
	modelID            string             // OpenAI model identifier (e.g. "gpt-5")
	client             openai.Client      // underlying OpenAI SDK client
	objectMode         fantasy.ObjectMode // how structured-object output is requested
	noDefaultUserAgent bool               // suppress the default User-Agent header
}
  31
  32// newResponsesLanguageModel implements a responses api model.
  33func newResponsesLanguageModel(modelID string, provider string, client openai.Client, objectMode fantasy.ObjectMode, noDefaultUserAgent bool) responsesLanguageModel {
  34	return responsesLanguageModel{
  35		modelID:            modelID,
  36		provider:           provider,
  37		client:             client,
  38		objectMode:         objectMode,
  39		noDefaultUserAgent: noDefaultUserAgent,
  40	}
  41}
  42
  43func (o responsesLanguageModel) Model() string {
  44	return o.modelID
  45}
  46
  47func (o responsesLanguageModel) Provider() string {
  48	return o.provider
  49}
  50
// responsesModelConfig captures per-model capabilities derived from the model
// ID; see getResponsesModelConfig.
type responsesModelConfig struct {
	isReasoningModel           bool   // model belongs to the reasoning family (o1/o3/o4/gpt-5/…)
	systemMessageMode          string // how system messages are delivered: "system", "developer", or "remove"
	requiredAutoTruncation     bool   // model requires truncation=auto on requests
	supportsFlexProcessing     bool   // model may use the "flex" service tier
	supportsPriorityProcessing bool   // model may use the "priority" service tier
}
  58
  59func getResponsesModelConfig(modelID string) responsesModelConfig {
  60	supportsFlexProcessing := strings.HasPrefix(modelID, "o3") ||
  61		strings.Contains(modelID, "-o3") || strings.Contains(modelID, "o4-mini") ||
  62		(strings.Contains(modelID, "gpt-5") && !strings.Contains(modelID, "gpt-5-chat"))
  63
  64	supportsPriorityProcessing := strings.Contains(modelID, "gpt-4") ||
  65		strings.Contains(modelID, "gpt-5-mini") ||
  66		(strings.Contains(modelID, "gpt-5") &&
  67			!strings.Contains(modelID, "gpt-5-nano") &&
  68			!strings.Contains(modelID, "gpt-5-chat")) ||
  69		strings.HasPrefix(modelID, "o3") ||
  70		strings.Contains(modelID, "-o3") ||
  71		strings.Contains(modelID, "o4-mini")
  72
  73	defaults := responsesModelConfig{
  74		requiredAutoTruncation:     false,
  75		systemMessageMode:          "system",
  76		supportsFlexProcessing:     supportsFlexProcessing,
  77		supportsPriorityProcessing: supportsPriorityProcessing,
  78	}
  79
  80	if strings.Contains(modelID, "gpt-5-chat") {
  81		return responsesModelConfig{
  82			isReasoningModel:           false,
  83			systemMessageMode:          defaults.systemMessageMode,
  84			requiredAutoTruncation:     defaults.requiredAutoTruncation,
  85			supportsFlexProcessing:     defaults.supportsFlexProcessing,
  86			supportsPriorityProcessing: defaults.supportsPriorityProcessing,
  87		}
  88	}
  89
  90	if strings.HasPrefix(modelID, "o1") || strings.Contains(modelID, "-o1") ||
  91		strings.HasPrefix(modelID, "o3") || strings.Contains(modelID, "-o3") ||
  92		strings.HasPrefix(modelID, "o4") || strings.Contains(modelID, "-o4") ||
  93		strings.HasPrefix(modelID, "oss") || strings.Contains(modelID, "-oss") ||
  94		strings.Contains(modelID, "gpt-5") || strings.Contains(modelID, "codex-") ||
  95		strings.Contains(modelID, "computer-use") {
  96		if strings.Contains(modelID, "o1-mini") || strings.Contains(modelID, "o1-preview") {
  97			return responsesModelConfig{
  98				isReasoningModel:           true,
  99				systemMessageMode:          "remove",
 100				requiredAutoTruncation:     defaults.requiredAutoTruncation,
 101				supportsFlexProcessing:     defaults.supportsFlexProcessing,
 102				supportsPriorityProcessing: defaults.supportsPriorityProcessing,
 103			}
 104		}
 105
 106		return responsesModelConfig{
 107			isReasoningModel:           true,
 108			systemMessageMode:          "developer",
 109			requiredAutoTruncation:     defaults.requiredAutoTruncation,
 110			supportsFlexProcessing:     defaults.supportsFlexProcessing,
 111			supportsPriorityProcessing: defaults.supportsPriorityProcessing,
 112		}
 113	}
 114
 115	return responsesModelConfig{
 116		isReasoningModel:           false,
 117		systemMessageMode:          defaults.systemMessageMode,
 118		requiredAutoTruncation:     defaults.requiredAutoTruncation,
 119		supportsFlexProcessing:     defaults.supportsFlexProcessing,
 120		supportsPriorityProcessing: defaults.supportsPriorityProcessing,
 121	}
 122}
 123
 124const previousResponseIDHistoryError = "cannot combine previous_response_id with replayed conversation history; use either previous_response_id (server-side chaining) or explicit message replay, not both"
 125const previousResponseIDStoreError = "previous_response_id requires store to be true; the current response will not be stored and cannot be used for further chaining"
 126
// prepareParams translates a fantasy.Call into responses.ResponseNewParams.
// Unsupported or model-incompatible settings are dropped and reported as
// warnings rather than failing the call; only invalid previous_response_id
// combinations produce an error.
func (o responsesLanguageModel) prepareParams(call fantasy.Call) (*responses.ResponseNewParams, []fantasy.CallWarning, error) {
	var warnings []fantasy.CallWarning
	params := &responses.ResponseNewParams{}

	modelConfig := getResponsesModelConfig(o.modelID)

	// The Responses API has no equivalent for these sampling settings.
	if call.TopK != nil {
		warnings = append(warnings, fantasy.CallWarning{
			Type:    fantasy.CallWarningTypeUnsupportedSetting,
			Setting: "topK",
		})
	}

	if call.PresencePenalty != nil {
		warnings = append(warnings, fantasy.CallWarning{
			Type:    fantasy.CallWarningTypeUnsupportedSetting,
			Setting: "presencePenalty",
		})
	}

	if call.FrequencyPenalty != nil {
		warnings = append(warnings, fantasy.CallWarning{
			Type:    fantasy.CallWarningTypeUnsupportedSetting,
			Setting: "frequencyPenalty",
		})
	}

	// Provider-specific options, if the caller attached any under this
	// provider's key.
	var openaiOptions *ResponsesProviderOptions
	if opts, ok := call.ProviderOptions[Name]; ok {
		if typedOpts, ok := opts.(*ResponsesProviderOptions); ok {
			openaiOptions = typedOpts
		}
	}

	// Default to store=false unless explicitly enabled.
	if openaiOptions != nil && openaiOptions.Store != nil {
		params.Store = param.NewOpt(*openaiOptions.Store)
	} else {
		params.Store = param.NewOpt(false)
	}

	// Server-side chaining: previous_response_id cannot be combined with
	// replayed history and requires store=true (see the error constants).
	if openaiOptions != nil && openaiOptions.PreviousResponseID != nil && *openaiOptions.PreviousResponseID != "" {
		if err := validatePreviousResponseIDPrompt(call.Prompt); err != nil {
			return nil, warnings, err
		}
		if openaiOptions.Store == nil || !*openaiOptions.Store {
			return nil, warnings, errors.New(previousResponseIDStoreError)
		}
		params.PreviousResponseID = param.NewOpt(*openaiOptions.PreviousResponseID)
	}

	input, inputWarnings := toResponsesPrompt(call.Prompt, modelConfig.systemMessageMode)
	warnings = append(warnings, inputWarnings...)

	var include []IncludeType

	addInclude := func(key IncludeType) {
		include = append(include, key)
	}

	// Logprobs may arrive as a bool (true means "use the API maximum") or as
	// a number (float64 when decoded from JSON, int when set natively).
	topLogprobs := 0
	if openaiOptions != nil && openaiOptions.Logprobs != nil {
		switch v := openaiOptions.Logprobs.(type) {
		case bool:
			if v {
				topLogprobs = topLogprobsMax
			}
		case float64:
			topLogprobs = int(v)
		case int:
			topLogprobs = v
		}
	}

	if topLogprobs > 0 {
		addInclude(IncludeMessageOutputTextLogprobs)
	}

	params.Model = o.modelID
	params.Input = responses.ResponseNewParamsInputUnion{
		OfInputItemList: input,
	}

	if call.Temperature != nil {
		params.Temperature = param.NewOpt(*call.Temperature)
	}
	if call.TopP != nil {
		params.TopP = param.NewOpt(*call.TopP)
	}
	if call.MaxOutputTokens != nil {
		params.MaxOutputTokens = param.NewOpt(*call.MaxOutputTokens)
	}

	if openaiOptions != nil {
		if openaiOptions.MaxToolCalls != nil {
			params.MaxToolCalls = param.NewOpt(*openaiOptions.MaxToolCalls)
		}
		if openaiOptions.Metadata != nil {
			// shared.Metadata only carries strings; non-string values are
			// silently dropped.
			metadata := make(shared.Metadata)
			for k, v := range openaiOptions.Metadata {
				if str, ok := v.(string); ok {
					metadata[k] = str
				}
			}
			params.Metadata = metadata
		}
		if openaiOptions.ParallelToolCalls != nil {
			params.ParallelToolCalls = param.NewOpt(*openaiOptions.ParallelToolCalls)
		}
		if openaiOptions.User != nil {
			params.User = param.NewOpt(*openaiOptions.User)
		}
		if openaiOptions.Instructions != nil {
			params.Instructions = param.NewOpt(*openaiOptions.Instructions)
		}
		if openaiOptions.ServiceTier != nil {
			params.ServiceTier = responses.ResponseNewParamsServiceTier(*openaiOptions.ServiceTier)
		}
		if openaiOptions.PromptCacheKey != nil {
			params.PromptCacheKey = param.NewOpt(*openaiOptions.PromptCacheKey)
		}
		if openaiOptions.SafetyIdentifier != nil {
			params.SafetyIdentifier = param.NewOpt(*openaiOptions.SafetyIdentifier)
		}
		if topLogprobs > 0 {
			params.TopLogprobs = param.NewOpt(int64(topLogprobs))
		}

		if len(openaiOptions.Include) > 0 {
			include = append(include, openaiOptions.Include...)
		}

		// Reasoning effort/summary only apply to reasoning-capable models;
		// for other models they are warned about further below.
		if modelConfig.isReasoningModel && (openaiOptions.ReasoningEffort != nil || openaiOptions.ReasoningSummary != nil) {
			reasoning := shared.ReasoningParam{}
			if openaiOptions.ReasoningEffort != nil {
				reasoning.Effort = shared.ReasoningEffort(*openaiOptions.ReasoningEffort)
			}
			if openaiOptions.ReasoningSummary != nil {
				reasoning.Summary = shared.ReasoningSummary(*openaiOptions.ReasoningSummary)
			}
			params.Reasoning = reasoning
		}
	}

	if modelConfig.requiredAutoTruncation {
		params.Truncation = responses.ResponseNewParamsTruncationAuto
	}

	if len(include) > 0 {
		includeParams := make([]responses.ResponseIncludable, len(include))
		for i, inc := range include {
			includeParams[i] = responses.ResponseIncludable(string(inc))
		}
		params.Include = includeParams
	}

	// Reasoning models reject temperature/topP: clear the already-set values
	// and warn instead of failing the request.
	if modelConfig.isReasoningModel {
		if call.Temperature != nil {
			params.Temperature = param.Opt[float64]{}
			warnings = append(warnings, fantasy.CallWarning{
				Type:    fantasy.CallWarningTypeUnsupportedSetting,
				Setting: "temperature",
				Details: "temperature is not supported for reasoning models",
			})
		}

		if call.TopP != nil {
			params.TopP = param.Opt[float64]{}
			warnings = append(warnings, fantasy.CallWarning{
				Type:    fantasy.CallWarningTypeUnsupportedSetting,
				Setting: "topP",
				Details: "topP is not supported for reasoning models",
			})
		}
	} else {
		if openaiOptions != nil {
			if openaiOptions.ReasoningEffort != nil {
				warnings = append(warnings, fantasy.CallWarning{
					Type:    fantasy.CallWarningTypeUnsupportedSetting,
					Setting: "reasoningEffort",
					Details: "reasoningEffort is not supported for non-reasoning models",
				})
			}

			if openaiOptions.ReasoningSummary != nil {
				warnings = append(warnings, fantasy.CallWarning{
					Type:    fantasy.CallWarningTypeUnsupportedSetting,
					Setting: "reasoningSummary",
					Details: "reasoningSummary is not supported for non-reasoning models",
				})
			}
		}
	}

	// Downgrade service tiers the model does not support to the default tier.
	if openaiOptions != nil && openaiOptions.ServiceTier != nil {
		if *openaiOptions.ServiceTier == ServiceTierFlex && !modelConfig.supportsFlexProcessing {
			warnings = append(warnings, fantasy.CallWarning{
				Type:    fantasy.CallWarningTypeUnsupportedSetting,
				Setting: "serviceTier",
				Details: "flex processing is only available for o3, o4-mini, and gpt-5 models",
			})
			params.ServiceTier = ""
		}

		if *openaiOptions.ServiceTier == ServiceTierPriority && !modelConfig.supportsPriorityProcessing {
			warnings = append(warnings, fantasy.CallWarning{
				Type:    fantasy.CallWarningTypeUnsupportedSetting,
				Setting: "serviceTier",
				Details: "priority processing is only available for supported models (gpt-4, gpt-5, gpt-5-mini, o3, o4-mini) and requires Enterprise access. gpt-5-nano is not supported",
			})
			params.ServiceTier = ""
		}
	}

	tools, toolChoice, toolWarnings := toResponsesTools(call.Tools, call.ToolChoice, openaiOptions)
	warnings = append(warnings, toolWarnings...)

	if len(tools) > 0 {
		params.Tools = tools
		params.ToolChoice = toolChoice
	}

	return params, warnings, nil
}
 350
 351func validatePreviousResponseIDPrompt(prompt fantasy.Prompt) error {
 352	for _, msg := range prompt {
 353		switch msg.Role {
 354		case fantasy.MessageRoleSystem, fantasy.MessageRoleUser:
 355			continue
 356		default:
 357			return errors.New(previousResponseIDHistoryError)
 358		}
 359	}
 360	return nil
 361}
 362
 363func responsesProviderMetadata(responseID string) fantasy.ProviderMetadata {
 364	if responseID == "" {
 365		return fantasy.ProviderMetadata{}
 366	}
 367
 368	return fantasy.ProviderMetadata{
 369		Name: &ResponsesProviderMetadata{
 370			ResponseID: responseID,
 371		},
 372	}
 373}
 374
 375func responsesUsage(resp responses.Response) fantasy.Usage {
 376	usage := fantasy.Usage{
 377		InputTokens:  resp.Usage.InputTokens,
 378		OutputTokens: resp.Usage.OutputTokens,
 379		TotalTokens:  resp.Usage.InputTokens + resp.Usage.OutputTokens,
 380	}
 381	if resp.Usage.OutputTokensDetails.ReasoningTokens != 0 {
 382		usage.ReasoningTokens = resp.Usage.OutputTokensDetails.ReasoningTokens
 383	}
 384	if resp.Usage.InputTokensDetails.CachedTokens != 0 {
 385		usage.CacheReadTokens = resp.Usage.InputTokensDetails.CachedTokens
 386	}
 387	return usage
 388}
 389
 390func toResponsesPrompt(prompt fantasy.Prompt, systemMessageMode string) (responses.ResponseInputParam, []fantasy.CallWarning) {
 391	var input responses.ResponseInputParam
 392	var warnings []fantasy.CallWarning
 393
 394	for _, msg := range prompt {
 395		switch msg.Role {
 396		case fantasy.MessageRoleSystem:
 397			var systemText string
 398			for _, c := range msg.Content {
 399				if c.GetType() != fantasy.ContentTypeText {
 400					warnings = append(warnings, fantasy.CallWarning{
 401						Type:    fantasy.CallWarningTypeOther,
 402						Message: "system prompt can only have text content",
 403					})
 404					continue
 405				}
 406				textPart, ok := fantasy.AsContentType[fantasy.TextPart](c)
 407				if !ok {
 408					warnings = append(warnings, fantasy.CallWarning{
 409						Type:    fantasy.CallWarningTypeOther,
 410						Message: "system prompt text part does not have the right type",
 411					})
 412					continue
 413				}
 414				if strings.TrimSpace(textPart.Text) != "" {
 415					systemText += textPart.Text
 416				}
 417			}
 418
 419			if systemText == "" {
 420				warnings = append(warnings, fantasy.CallWarning{
 421					Type:    fantasy.CallWarningTypeOther,
 422					Message: "system prompt has no text parts",
 423				})
 424				continue
 425			}
 426
 427			switch systemMessageMode {
 428			case "system":
 429				input = append(input, responses.ResponseInputItemParamOfMessage(systemText, responses.EasyInputMessageRoleSystem))
 430			case "developer":
 431				input = append(input, responses.ResponseInputItemParamOfMessage(systemText, responses.EasyInputMessageRoleDeveloper))
 432			case "remove":
 433				warnings = append(warnings, fantasy.CallWarning{
 434					Type:    fantasy.CallWarningTypeOther,
 435					Message: "system messages are removed for this model",
 436				})
 437			}
 438
 439		case fantasy.MessageRoleUser:
 440			var contentParts responses.ResponseInputMessageContentListParam
 441			for i, c := range msg.Content {
 442				switch c.GetType() {
 443				case fantasy.ContentTypeText:
 444					textPart, ok := fantasy.AsContentType[fantasy.TextPart](c)
 445					if !ok {
 446						warnings = append(warnings, fantasy.CallWarning{
 447							Type:    fantasy.CallWarningTypeOther,
 448							Message: "user message text part does not have the right type",
 449						})
 450						continue
 451					}
 452					contentParts = append(contentParts, responses.ResponseInputContentUnionParam{
 453						OfInputText: &responses.ResponseInputTextParam{
 454							Type: "input_text",
 455							Text: textPart.Text,
 456						},
 457					})
 458
 459				case fantasy.ContentTypeFile:
 460					filePart, ok := fantasy.AsContentType[fantasy.FilePart](c)
 461					if !ok {
 462						warnings = append(warnings, fantasy.CallWarning{
 463							Type:    fantasy.CallWarningTypeOther,
 464							Message: "user message file part does not have the right type",
 465						})
 466						continue
 467					}
 468
 469					if strings.HasPrefix(filePart.MediaType, "image/") {
 470						base64Encoded := base64.StdEncoding.EncodeToString(filePart.Data)
 471						imageURL := fmt.Sprintf("data:%s;base64,%s", filePart.MediaType, base64Encoded)
 472						contentParts = append(contentParts, responses.ResponseInputContentUnionParam{
 473							OfInputImage: &responses.ResponseInputImageParam{
 474								Type:     "input_image",
 475								ImageURL: param.NewOpt(imageURL),
 476							},
 477						})
 478					} else if filePart.MediaType == "application/pdf" {
 479						base64Encoded := base64.StdEncoding.EncodeToString(filePart.Data)
 480						fileData := fmt.Sprintf("data:application/pdf;base64,%s", base64Encoded)
 481						filename := filePart.Filename
 482						if filename == "" {
 483							filename = fmt.Sprintf("part-%d.pdf", i)
 484						}
 485						contentParts = append(contentParts, responses.ResponseInputContentUnionParam{
 486							OfInputFile: &responses.ResponseInputFileParam{
 487								Type:     "input_file",
 488								Filename: param.NewOpt(filename),
 489								FileData: param.NewOpt(fileData),
 490							},
 491						})
 492					} else {
 493						warnings = append(warnings, fantasy.CallWarning{
 494							Type:    fantasy.CallWarningTypeOther,
 495							Message: fmt.Sprintf("file part media type %s not supported", filePart.MediaType),
 496						})
 497					}
 498				}
 499			}
 500
 501			if !hasVisibleResponsesUserContent(contentParts) {
 502				warnings = append(warnings, fantasy.CallWarning{
 503					Type:    fantasy.CallWarningTypeOther,
 504					Message: "dropping empty user message (contains neither user-facing content nor tool results)",
 505				})
 506				continue
 507			}
 508
 509			input = append(input, responses.ResponseInputItemParamOfMessage(contentParts, responses.EasyInputMessageRoleUser))
 510
 511		case fantasy.MessageRoleAssistant:
 512			startIdx := len(input)
 513			for _, c := range msg.Content {
 514				switch c.GetType() {
 515				case fantasy.ContentTypeText:
 516					textPart, ok := fantasy.AsContentType[fantasy.TextPart](c)
 517					if !ok {
 518						warnings = append(warnings, fantasy.CallWarning{
 519							Type:    fantasy.CallWarningTypeOther,
 520							Message: "assistant message text part does not have the right type",
 521						})
 522						continue
 523					}
 524					input = append(input, responses.ResponseInputItemParamOfMessage(textPart.Text, responses.EasyInputMessageRoleAssistant))
 525
 526				case fantasy.ContentTypeToolCall:
 527					toolCallPart, ok := fantasy.AsContentType[fantasy.ToolCallPart](c)
 528					if !ok {
 529						warnings = append(warnings, fantasy.CallWarning{
 530							Type:    fantasy.CallWarningTypeOther,
 531							Message: "assistant message tool call part does not have the right type",
 532						})
 533						continue
 534					}
 535
 536					if toolCallPart.ProviderExecuted {
 537						// Round-trip provider-executed tools via
 538						// item_reference, letting the API resolve
 539						// the stored output item by ID.
 540						input = append(input, responses.ResponseInputItemParamOfItemReference(toolCallPart.ToolCallID))
 541						continue
 542					}
 543
 544					inputJSON, err := json.Marshal(toolCallPart.Input)
 545					if err != nil {
 546						warnings = append(warnings, fantasy.CallWarning{
 547							Type:    fantasy.CallWarningTypeOther,
 548							Message: fmt.Sprintf("failed to marshal tool call input: %v", err),
 549						})
 550						continue
 551					}
 552
 553					input = append(input, responses.ResponseInputItemParamOfFunctionCall(string(inputJSON), toolCallPart.ToolCallID, toolCallPart.ToolName))
 554				case fantasy.ContentTypeSource:
 555					// Source citations from web search are not a
 556					// recognised Responses API input type; skip.
 557					continue
 558				case fantasy.ContentTypeReasoning:
 559					reasoningMetadata := GetReasoningMetadata(c.Options())
 560					if reasoningMetadata == nil || reasoningMetadata.ItemID == "" {
 561						continue
 562					}
 563					if len(reasoningMetadata.Summary) == 0 && reasoningMetadata.EncryptedContent == nil {
 564						warnings = append(warnings, fantasy.CallWarning{
 565							Type:    fantasy.CallWarningTypeOther,
 566							Message: "assistant message reasoning part does is empty",
 567						})
 568						continue
 569					}
 570					// we want to always send an empty array
 571					summary := make([]responses.ResponseReasoningItemSummaryParam, 0, len(reasoningMetadata.Summary))
 572					for _, s := range reasoningMetadata.Summary {
 573						summary = append(summary, responses.ResponseReasoningItemSummaryParam{
 574							Type: "summary_text",
 575							Text: s,
 576						})
 577					}
 578					reasoning := &responses.ResponseReasoningItemParam{
 579						ID:      reasoningMetadata.ItemID,
 580						Summary: summary,
 581					}
 582					if reasoningMetadata.EncryptedContent != nil {
 583						reasoning.EncryptedContent = param.NewOpt(*reasoningMetadata.EncryptedContent)
 584					}
 585					input = append(input, responses.ResponseInputItemUnionParam{
 586						OfReasoning: reasoning,
 587					})
 588				}
 589			}
 590
 591			if !hasVisibleResponsesAssistantContent(input, startIdx) {
 592				warnings = append(warnings, fantasy.CallWarning{
 593					Type:    fantasy.CallWarningTypeOther,
 594					Message: "dropping empty assistant message (contains neither user-facing content nor tool calls)",
 595				})
 596				// Remove any items that were added during this iteration
 597				input = input[:startIdx]
 598				continue
 599			}
 600
 601		case fantasy.MessageRoleTool:
 602			for _, c := range msg.Content {
 603				if c.GetType() != fantasy.ContentTypeToolResult {
 604					warnings = append(warnings, fantasy.CallWarning{
 605						Type:    fantasy.CallWarningTypeOther,
 606						Message: "tool message can only have tool result content",
 607					})
 608					continue
 609				}
 610
 611				toolResultPart, ok := fantasy.AsContentType[fantasy.ToolResultPart](c)
 612				if !ok {
 613					warnings = append(warnings, fantasy.CallWarning{
 614						Type:    fantasy.CallWarningTypeOther,
 615						Message: "tool message result part does not have the right type",
 616					})
 617					continue
 618				}
 619
 620				// Provider-executed tool results (e.g. web search)
 621				// are already round-tripped via the tool call; skip.
 622				if toolResultPart.ProviderExecuted {
 623					continue
 624				}
 625
 626				var outputStr string
 627
 628				switch toolResultPart.Output.GetType() {
 629				case fantasy.ToolResultContentTypeText:
 630					output, ok := fantasy.AsToolResultOutputType[fantasy.ToolResultOutputContentText](toolResultPart.Output)
 631					if !ok {
 632						warnings = append(warnings, fantasy.CallWarning{
 633							Type:    fantasy.CallWarningTypeOther,
 634							Message: "tool result output does not have the right type",
 635						})
 636						continue
 637					}
 638					outputStr = output.Text
 639				case fantasy.ToolResultContentTypeError:
 640					output, ok := fantasy.AsToolResultOutputType[fantasy.ToolResultOutputContentError](toolResultPart.Output)
 641					if !ok {
 642						warnings = append(warnings, fantasy.CallWarning{
 643							Type:    fantasy.CallWarningTypeOther,
 644							Message: "tool result output does not have the right type",
 645						})
 646						continue
 647					}
 648					outputStr = output.Error.Error()
 649				}
 650
 651				input = append(input, responses.ResponseInputItemParamOfFunctionCallOutput(toolResultPart.ToolCallID, outputStr))
 652			}
 653		}
 654	}
 655
 656	return input, warnings
 657}
 658
 659func hasVisibleResponsesUserContent(content responses.ResponseInputMessageContentListParam) bool {
 660	return len(content) > 0
 661}
 662
 663func hasVisibleResponsesAssistantContent(items []responses.ResponseInputItemUnionParam, startIdx int) bool {
 664	// Check if we added any assistant content parts from this message
 665	for i := startIdx; i < len(items); i++ {
 666		if items[i].OfMessage != nil || items[i].OfFunctionCall != nil || items[i].OfItemReference != nil {
 667			return true
 668		}
 669	}
 670	return false
 671}
 672
 673func toResponsesTools(tools []fantasy.Tool, toolChoice *fantasy.ToolChoice, options *ResponsesProviderOptions) ([]responses.ToolUnionParam, responses.ResponseNewParamsToolChoiceUnion, []fantasy.CallWarning) {
 674	warnings := make([]fantasy.CallWarning, 0)
 675	var openaiTools []responses.ToolUnionParam
 676
 677	if len(tools) == 0 {
 678		return nil, responses.ResponseNewParamsToolChoiceUnion{}, nil
 679	}
 680
 681	strictJSONSchema := false
 682	if options != nil && options.StrictJSONSchema != nil {
 683		strictJSONSchema = *options.StrictJSONSchema
 684	}
 685
 686	for _, tool := range tools {
 687		if tool.GetType() == fantasy.ToolTypeFunction {
 688			ft, ok := tool.(fantasy.FunctionTool)
 689			if !ok {
 690				continue
 691			}
 692			openaiTools = append(openaiTools, responses.ToolUnionParam{
 693				OfFunction: &responses.FunctionToolParam{
 694					Name:        ft.Name,
 695					Description: param.NewOpt(ft.Description),
 696					Parameters:  ft.InputSchema,
 697					Strict:      param.NewOpt(strictJSONSchema),
 698					Type:        "function",
 699				},
 700			})
 701			continue
 702		}
 703		if tool.GetType() == fantasy.ToolTypeProviderDefined {
 704			pt, ok := tool.(fantasy.ProviderDefinedTool)
 705			if !ok {
 706				continue
 707			}
 708			switch pt.ID {
 709			case "web_search":
 710				openaiTools = append(openaiTools, toWebSearchToolParam(pt))
 711				continue
 712			}
 713		}
 714
 715		warnings = append(warnings, fantasy.CallWarning{
 716			Type:    fantasy.CallWarningTypeUnsupportedTool,
 717			Tool:    tool,
 718			Message: "tool is not supported",
 719		})
 720	}
 721
 722	if toolChoice == nil {
 723		return openaiTools, responses.ResponseNewParamsToolChoiceUnion{}, warnings
 724	}
 725
 726	var openaiToolChoice responses.ResponseNewParamsToolChoiceUnion
 727
 728	switch *toolChoice {
 729	case fantasy.ToolChoiceAuto:
 730		openaiToolChoice = responses.ResponseNewParamsToolChoiceUnion{
 731			OfToolChoiceMode: param.NewOpt(responses.ToolChoiceOptionsAuto),
 732		}
 733	case fantasy.ToolChoiceNone:
 734		openaiToolChoice = responses.ResponseNewParamsToolChoiceUnion{
 735			OfToolChoiceMode: param.NewOpt(responses.ToolChoiceOptionsNone),
 736		}
 737	case fantasy.ToolChoiceRequired:
 738		openaiToolChoice = responses.ResponseNewParamsToolChoiceUnion{
 739			OfToolChoiceMode: param.NewOpt(responses.ToolChoiceOptionsRequired),
 740		}
 741	default:
 742		openaiToolChoice = responses.ResponseNewParamsToolChoiceUnion{
 743			OfFunctionTool: &responses.ToolChoiceFunctionParam{
 744				Type: "function",
 745				Name: string(*toolChoice),
 746			},
 747		}
 748	}
 749
 750	return openaiTools, openaiToolChoice, warnings
 751}
 752
// Generate performs a single non-streaming Responses API call and maps the
// output items (messages, function calls, provider-executed web searches,
// and reasoning items) into fantasy content.
func (o responsesLanguageModel) Generate(ctx context.Context, call fantasy.Call) (*fantasy.Response, error) {
	params, warnings, err := o.prepareParams(call)
	if err != nil {
		return nil, err
	}

	response, err := o.client.Responses.New(ctx, *params, callUARequestOptions(call, o.noDefaultUserAgent)...)
	if err != nil {
		return nil, toProviderErr(err)
	}

	// The Responses API can also report errors in-band on the response object.
	if response.Error.Message != "" {
		return nil, &fantasy.Error{
			Title:   "provider error",
			Message: fmt.Sprintf("%s (code: %s)", response.Error.Message, response.Error.Code),
		}
	}

	var content []fantasy.Content
	hasFunctionCall := false

	for _, outputItem := range response.Output {
		switch outputItem.Type {
		case "message":
			for _, contentPart := range outputItem.Content {
				if contentPart.Type == "output_text" {
					content = append(content, fantasy.TextContent{
						Text: contentPart.Text,
					})

					// Annotations attached to the text become source content.
					for _, annotation := range contentPart.Annotations {
						switch annotation.Type {
						case "url_citation":
							content = append(content, fantasy.SourceContent{
								SourceType: fantasy.SourceTypeURL,
								ID:         uuid.NewString(),
								URL:        annotation.URL,
								Title:      annotation.Title,
							})
						case "file_citation":
							// Fall back to a generic title and the file ID when
							// the annotation carries no filename.
							title := "Document"
							if annotation.Filename != "" {
								title = annotation.Filename
							}
							filename := annotation.Filename
							if filename == "" {
								filename = annotation.FileID
							}
							content = append(content, fantasy.SourceContent{
								SourceType: fantasy.SourceTypeDocument,
								ID:         uuid.NewString(),
								MediaType:  "text/plain",
								Title:      title,
								Filename:   filename,
							})
						}
					}
				}
			}

		case "function_call":
			hasFunctionCall = true
			content = append(content, fantasy.ToolCallContent{
				ProviderExecuted: false,
				ToolCallID:       outputItem.CallID,
				ToolName:         outputItem.Name,
				Input:            outputItem.Arguments.OfString,
			})

		case "web_search_call":
			// Provider-executed web search tool call. Emit both
			// a ToolCallContent and ToolResultContent as a pair,
			// matching the vercel/ai pattern for provider tools.
			//
			// Note: source citations come from url_citation annotations
			// on the message text (handled in the "message" case above),
			// not from the web_search_call action.
			wsMeta := webSearchCallToMetadata(outputItem.ID, outputItem.Action)
			content = append(content, fantasy.ToolCallContent{
				ProviderExecuted: true,
				ToolCallID:       outputItem.ID,
				ToolName:         "web_search",
			})
			content = append(content, fantasy.ToolResultContent{
				ProviderExecuted: true,
				ToolCallID:       outputItem.ID,
				ToolName:         "web_search",
				ProviderMetadata: fantasy.ProviderMetadata{
					Name: wsMeta,
				},
			})
		case "reasoning":
			// Preserve the item ID (and encrypted payload, if present) so the
			// reasoning item can be replayed on a follow-up request.
			metadata := &ResponsesReasoningMetadata{
				ItemID: outputItem.ID,
			}
			if outputItem.EncryptedContent != "" {
				metadata.EncryptedContent = &outputItem.EncryptedContent
			}

			if len(outputItem.Summary) == 0 && metadata.EncryptedContent == nil {
				continue
			}

			// When there are no summary parts, add an empty reasoning part
			summaries := outputItem.Summary
			if len(summaries) == 0 {
				summaries = []responses.ResponseReasoningItemSummary{{Type: "summary_text", Text: ""}}
			}

			for _, s := range summaries {
				metadata.Summary = append(metadata.Summary, s.Text)
			}

			content = append(content, fantasy.ReasoningContent{
				Text: strings.Join(metadata.Summary, "\n"),
				ProviderMetadata: fantasy.ProviderMetadata{
					Name: metadata,
				},
			})
		}
	}

	usage := responsesUsage(*response)
	finishReason := mapResponsesFinishReason(response.IncompleteDetails.Reason, hasFunctionCall)

	return &fantasy.Response{
		Content:          content,
		Usage:            usage,
		FinishReason:     finishReason,
		ProviderMetadata: responsesProviderMetadata(response.ID),
		Warnings:         warnings,
	}, nil
}
 886
 887func mapResponsesFinishReason(reason string, hasFunctionCall bool) fantasy.FinishReason {
 888	if hasFunctionCall {
 889		return fantasy.FinishReasonToolCalls
 890	}
 891
 892	switch reason {
 893	case "":
 894		return fantasy.FinishReasonStop
 895	case "max_tokens", "max_output_tokens":
 896		return fantasy.FinishReasonLength
 897	case "content_filter":
 898		return fantasy.FinishReasonContentFilter
 899	default:
 900		return fantasy.FinishReasonOther
 901	}
 902}
 903
// Stream implements fantasy.LanguageModel. It opens a streaming Responses
// API request and translates the server-sent events into fantasy.StreamPart
// values, tracking in-flight function calls and reasoning items so that
// deltas can be attributed to the correct IDs.
func (o responsesLanguageModel) Stream(ctx context.Context, call fantasy.Call) (fantasy.StreamResponse, error) {
	params, warnings, err := o.prepareParams(call)
	if err != nil {
		return nil, err
	}

	stream := o.client.Responses.NewStreaming(ctx, *params, callUARequestOptions(call, o.noDefaultUserAgent)...)

	finishReason := fantasy.FinishReasonUnknown
	var usage fantasy.Usage
	// responseID tracks the server-assigned response ID. It's first set from the
	// response.created event and may be overwritten by response.completed or
	// response.incomplete events. Per the OpenAI API contract, these IDs are
	// identical; the overwrites ensure we have the final value even if an event
	// is missed.
	responseID := ""
	// ongoingToolCalls maps output index -> in-flight function call, because
	// argument delta events carry the output index rather than the call ID.
	ongoingToolCalls := make(map[int64]*ongoingToolCall)
	hasFunctionCall := false
	// activeReasoning maps reasoning item ID -> accumulated summary state.
	activeReasoning := make(map[string]*reasoningState)

	return func(yield func(fantasy.StreamPart) bool) {
		// Surface any request-preparation warnings before the first event.
		if len(warnings) > 0 {
			if !yield(fantasy.StreamPart{
				Type:     fantasy.StreamPartTypeWarnings,
				Warnings: warnings,
			}) {
				return
			}
		}

		for stream.Next() {
			event := stream.Current()

			switch event.Type {
			case "response.created":
				created := event.AsResponseCreated()
				responseID = created.Response.ID

			case "response.output_item.added":
				added := event.AsResponseOutputItemAdded()
				switch added.Item.Type {
				case "function_call":
					// Register the call so later argument deltas (keyed by
					// output index) can be routed to this call ID.
					ongoingToolCalls[added.OutputIndex] = &ongoingToolCall{
						toolName:   added.Item.Name,
						toolCallID: added.Item.CallID,
					}
					if !yield(fantasy.StreamPart{
						Type:         fantasy.StreamPartTypeToolInputStart,
						ID:           added.Item.CallID,
						ToolCallName: added.Item.Name,
					}) {
						return
					}

				case "web_search_call":
					// Provider-executed web search; emit start.
					if !yield(fantasy.StreamPart{
						Type:             fantasy.StreamPartTypeToolInputStart,
						ID:               added.Item.ID,
						ToolCallName:     "web_search",
						ProviderExecuted: true,
					}) {
						return
					}

				case "message":
					if !yield(fantasy.StreamPart{
						Type: fantasy.StreamPartTypeTextStart,
						ID:   added.Item.ID,
					}) {
						return
					}

				case "reasoning":
					// Summary is initialized non-nil so summary-part events
					// can append by index as they arrive.
					metadata := &ResponsesReasoningMetadata{
						ItemID:  added.Item.ID,
						Summary: []string{},
					}
					if added.Item.EncryptedContent != "" {
						metadata.EncryptedContent = &added.Item.EncryptedContent
					}

					activeReasoning[added.Item.ID] = &reasoningState{
						metadata: metadata,
					}
					if !yield(fantasy.StreamPart{
						Type: fantasy.StreamPartTypeReasoningStart,
						ID:   added.Item.ID,
						ProviderMetadata: fantasy.ProviderMetadata{
							Name: metadata,
						},
					}) {
						return
					}
				}

			case "response.output_item.done":
				done := event.AsResponseOutputItemDone()
				switch done.Item.Type {
				case "function_call":
					tc := ongoingToolCalls[done.OutputIndex]
					if tc != nil {
						delete(ongoingToolCalls, done.OutputIndex)
						hasFunctionCall = true

						if !yield(fantasy.StreamPart{
							Type: fantasy.StreamPartTypeToolInputEnd,
							ID:   done.Item.CallID,
						}) {
							return
						}
						// Emit the completed call with the full argument JSON.
						if !yield(fantasy.StreamPart{
							Type:          fantasy.StreamPartTypeToolCall,
							ID:            done.Item.CallID,
							ToolCallName:  done.Item.Name,
							ToolCallInput: done.Item.Arguments.OfString,
						}) {
							return
						}
					}

				case "web_search_call":
					// Provider-executed web search completed.
					// Source citations come from url_citation annotations
					// on the streamed message text, not from the action.
					if !yield(fantasy.StreamPart{
						Type: fantasy.StreamPartTypeToolInputEnd,
						ID:   done.Item.ID,
					}) {
						return
					}
					if !yield(fantasy.StreamPart{
						Type:             fantasy.StreamPartTypeToolCall,
						ID:               done.Item.ID,
						ToolCallName:     "web_search",
						ProviderExecuted: true,
					}) {
						return
					}
					// Emit a ToolResult so the agent framework
					// includes it in round-trip messages.
					if !yield(fantasy.StreamPart{
						Type:             fantasy.StreamPartTypeToolResult,
						ID:               done.Item.ID,
						ToolCallName:     "web_search",
						ProviderExecuted: true,
						ProviderMetadata: fantasy.ProviderMetadata{
							Name: webSearchCallToMetadata(done.Item.ID, done.Item.Action),
						},
					}) {
						return
					}
				case "message":
					if !yield(fantasy.StreamPart{
						Type: fantasy.StreamPartTypeTextEnd,
						ID:   done.Item.ID,
					}) {
						return
					}

				case "reasoning":
					// Close out the reasoning item, attaching the final
					// accumulated metadata (summary + encrypted content).
					state := activeReasoning[done.Item.ID]
					if state != nil {
						if !yield(fantasy.StreamPart{
							Type: fantasy.StreamPartTypeReasoningEnd,
							ID:   done.Item.ID,
							ProviderMetadata: fantasy.ProviderMetadata{
								Name: state.metadata,
							},
						}) {
							return
						}
						delete(activeReasoning, done.Item.ID)
					}
				}

			case "response.function_call_arguments.delta":
				delta := event.AsResponseFunctionCallArgumentsDelta()
				tc := ongoingToolCalls[delta.OutputIndex]
				if tc != nil {
					if !yield(fantasy.StreamPart{
						Type:  fantasy.StreamPartTypeToolInputDelta,
						ID:    tc.toolCallID,
						Delta: delta.Delta,
					}) {
						return
					}
				}

			case "response.output_text.delta":
				textDelta := event.AsResponseOutputTextDelta()
				if !yield(fantasy.StreamPart{
					Type:  fantasy.StreamPartTypeTextDelta,
					ID:    textDelta.ItemID,
					Delta: textDelta.Delta,
				}) {
					return
				}

			case "response.reasoning_summary_part.added":
				// A new summary part begins; reserve a slot for its text and
				// emit a newline delta to visually separate summary parts.
				added := event.AsResponseReasoningSummaryPartAdded()
				state := activeReasoning[added.ItemID]
				if state != nil {
					state.metadata.Summary = append(state.metadata.Summary, "")
					activeReasoning[added.ItemID] = state
					if !yield(fantasy.StreamPart{
						Type:  fantasy.StreamPartTypeReasoningDelta,
						ID:    added.ItemID,
						Delta: "\n",
						ProviderMetadata: fantasy.ProviderMetadata{
							Name: state.metadata,
						},
					}) {
						return
					}
				}

			case "response.reasoning_summary_text.delta":
				textDelta := event.AsResponseReasoningSummaryTextDelta()
				state := activeReasoning[textDelta.ItemID]
				if state != nil {
					// Guard against deltas arriving for a summary index we
					// never saw a summary_part.added for.
					if len(state.metadata.Summary)-1 >= int(textDelta.SummaryIndex) {
						state.metadata.Summary[textDelta.SummaryIndex] += textDelta.Delta
					}
					activeReasoning[textDelta.ItemID] = state
					if !yield(fantasy.StreamPart{
						Type:  fantasy.StreamPartTypeReasoningDelta,
						ID:    textDelta.ItemID,
						Delta: textDelta.Delta,
						ProviderMetadata: fantasy.ProviderMetadata{
							Name: state.metadata,
						},
					}) {
						return
					}
				}

			case "response.completed":
				completed := event.AsResponseCompleted()
				responseID = completed.Response.ID
				finishReason = mapResponsesFinishReason(completed.Response.IncompleteDetails.Reason, hasFunctionCall)
				usage = responsesUsage(completed.Response)

			case "response.incomplete":
				incomplete := event.AsResponseIncomplete()
				responseID = incomplete.Response.ID
				finishReason = mapResponsesFinishReason(incomplete.Response.IncompleteDetails.Reason, hasFunctionCall)
				usage = responsesUsage(incomplete.Response)

			case "error":
				errorEvent := event.AsError()
				if !yield(fantasy.StreamPart{
					Type:  fantasy.StreamPartTypeError,
					Error: fmt.Errorf("response error: %s (code: %s)", errorEvent.Message, errorEvent.Code),
				}) {
					return
				}
				return
			}
		}

		// Transport-level stream failure: surface it and stop without a
		// finish part.
		err := stream.Err()
		if err != nil {
			yield(fantasy.StreamPart{
				Type:  fantasy.StreamPartTypeError,
				Error: toProviderErr(err),
			})
			return
		}

		yield(fantasy.StreamPart{
			Type:             fantasy.StreamPartTypeFinish,
			Usage:            usage,
			FinishReason:     finishReason,
			ProviderMetadata: responsesProviderMetadata(responseID),
		})
	}, nil
}
1182
1183// toWebSearchToolParam converts a ProviderDefinedTool with ID
1184// "web_search" into the OpenAI SDK's WebSearchToolParam.
1185func toWebSearchToolParam(pt fantasy.ProviderDefinedTool) responses.ToolUnionParam {
1186	wst := responses.WebSearchToolParam{
1187		Type: responses.WebSearchToolTypeWebSearch,
1188	}
1189	if pt.Args != nil {
1190		if size, ok := pt.Args["search_context_size"].(SearchContextSize); ok && size != "" {
1191			wst.SearchContextSize = responses.WebSearchToolSearchContextSize(size)
1192		}
1193		// Also accept plain string for search_context_size.
1194		if size, ok := pt.Args["search_context_size"].(string); ok && size != "" {
1195			wst.SearchContextSize = responses.WebSearchToolSearchContextSize(size)
1196		}
1197		if domains, ok := pt.Args["allowed_domains"].([]string); ok && len(domains) > 0 {
1198			wst.Filters.AllowedDomains = domains
1199		}
1200		if loc, ok := pt.Args["user_location"].(*WebSearchUserLocation); ok && loc != nil {
1201			if loc.City != "" {
1202				wst.UserLocation.City = param.NewOpt(loc.City)
1203			}
1204			if loc.Region != "" {
1205				wst.UserLocation.Region = param.NewOpt(loc.Region)
1206			}
1207			if loc.Country != "" {
1208				wst.UserLocation.Country = param.NewOpt(loc.Country)
1209			}
1210			if loc.Timezone != "" {
1211				wst.UserLocation.Timezone = param.NewOpt(loc.Timezone)
1212			}
1213		}
1214	}
1215	return responses.ToolUnionParam{
1216		OfWebSearch: &wst,
1217	}
1218}
1219
1220// webSearchCallToMetadata converts an OpenAI web search call output
1221// into our structured metadata for round-tripping.
1222func webSearchCallToMetadata(itemID string, action responses.ResponseOutputItemUnionAction) *WebSearchCallMetadata {
1223	meta := &WebSearchCallMetadata{ItemID: itemID}
1224	if action.Type != "" {
1225		a := &WebSearchAction{
1226			Type:  action.Type,
1227			Query: action.Query,
1228		}
1229		for _, src := range action.Sources {
1230			a.Sources = append(a.Sources, WebSearchSource{
1231				Type: string(src.Type),
1232				URL:  src.URL,
1233			})
1234		}
1235		meta.Action = a
1236	}
1237	return meta
1238}
1239
1240// GetReasoningMetadata extracts reasoning metadata from provider options for responses models.
1241func GetReasoningMetadata(providerOptions fantasy.ProviderOptions) *ResponsesReasoningMetadata {
1242	if openaiResponsesOptions, ok := providerOptions[Name]; ok {
1243		if reasoning, ok := openaiResponsesOptions.(*ResponsesReasoningMetadata); ok {
1244			return reasoning
1245		}
1246	}
1247	return nil
1248}
1249
// ongoingToolCall tracks an in-flight streamed function call so that
// argument deltas (which arrive keyed by output index) can be attributed
// to the correct tool call ID.
type ongoingToolCall struct {
	toolName   string // tool name from the output_item.added event
	toolCallID string // server-assigned call ID used in emitted stream parts
}
1254
// reasoningState holds the accumulated metadata (summary parts, encrypted
// content) for a reasoning output item while it is being streamed.
type reasoningState struct {
	metadata *ResponsesReasoningMetadata
}
1258
1259// GenerateObject implements fantasy.LanguageModel.
1260func (o responsesLanguageModel) GenerateObject(ctx context.Context, call fantasy.ObjectCall) (*fantasy.ObjectResponse, error) {
1261	switch o.objectMode {
1262	case fantasy.ObjectModeText:
1263		return object.GenerateWithText(ctx, o, call)
1264	case fantasy.ObjectModeTool:
1265		return object.GenerateWithTool(ctx, o, call)
1266	default:
1267		return o.generateObjectWithJSONMode(ctx, call)
1268	}
1269}
1270
1271// StreamObject implements fantasy.LanguageModel.
1272func (o responsesLanguageModel) StreamObject(ctx context.Context, call fantasy.ObjectCall) (fantasy.ObjectStreamResponse, error) {
1273	switch o.objectMode {
1274	case fantasy.ObjectModeTool:
1275		return object.StreamWithTool(ctx, o, call)
1276	case fantasy.ObjectModeText:
1277		return object.StreamWithText(ctx, o, call)
1278	default:
1279		return o.streamObjectWithJSONMode(ctx, call)
1280	}
1281}
1282
1283func (o responsesLanguageModel) generateObjectWithJSONMode(ctx context.Context, call fantasy.ObjectCall) (*fantasy.ObjectResponse, error) {
1284	// Convert our Schema to OpenAI's JSON Schema format
1285	jsonSchemaMap := schema.ToMap(call.Schema)
1286
1287	// Add additionalProperties: false recursively for strict mode (OpenAI requirement)
1288	addAdditionalPropertiesFalse(jsonSchemaMap)
1289
1290	schemaName := call.SchemaName
1291	if schemaName == "" {
1292		schemaName = "response"
1293	}
1294
1295	// Build request using prepareParams
1296	fantasyCall := fantasy.Call{
1297		Prompt:           call.Prompt,
1298		MaxOutputTokens:  call.MaxOutputTokens,
1299		Temperature:      call.Temperature,
1300		TopP:             call.TopP,
1301		PresencePenalty:  call.PresencePenalty,
1302		FrequencyPenalty: call.FrequencyPenalty,
1303		ProviderOptions:  call.ProviderOptions,
1304	}
1305
1306	params, warnings, err := o.prepareParams(fantasyCall)
1307	if err != nil {
1308		return nil, err
1309	}
1310
1311	// Add structured output via Text.Format field
1312	params.Text = responses.ResponseTextConfigParam{
1313		Format: responses.ResponseFormatTextConfigParamOfJSONSchema(schemaName, jsonSchemaMap),
1314	}
1315
1316	// Make request
1317	response, err := o.client.Responses.New(ctx, *params, objectCallUARequestOptions(call, o.noDefaultUserAgent)...)
1318	if err != nil {
1319		return nil, toProviderErr(err)
1320	}
1321
1322	if response.Error.Message != "" {
1323		return nil, &fantasy.Error{
1324			Title:   "provider error",
1325			Message: fmt.Sprintf("%s (code: %s)", response.Error.Message, response.Error.Code),
1326		}
1327	}
1328
1329	// Extract JSON text from response
1330	var jsonText string
1331	for _, outputItem := range response.Output {
1332		if outputItem.Type == "message" {
1333			for _, contentPart := range outputItem.Content {
1334				if contentPart.Type == "output_text" {
1335					jsonText = contentPart.Text
1336					break
1337				}
1338			}
1339		}
1340	}
1341
1342	if jsonText == "" {
1343		usage := fantasy.Usage{
1344			InputTokens:  response.Usage.InputTokens,
1345			OutputTokens: response.Usage.OutputTokens,
1346			TotalTokens:  response.Usage.InputTokens + response.Usage.OutputTokens,
1347		}
1348		finishReason := mapResponsesFinishReason(response.IncompleteDetails.Reason, false)
1349		return nil, &fantasy.NoObjectGeneratedError{
1350			RawText:      "",
1351			ParseError:   fmt.Errorf("no text content in response"),
1352			Usage:        usage,
1353			FinishReason: finishReason,
1354		}
1355	}
1356
1357	// Parse and validate
1358	var obj any
1359	if call.RepairText != nil {
1360		obj, err = schema.ParseAndValidateWithRepair(ctx, jsonText, call.Schema, call.RepairText)
1361	} else {
1362		obj, err = schema.ParseAndValidate(jsonText, call.Schema)
1363	}
1364
1365	usage := responsesUsage(*response)
1366	finishReason := mapResponsesFinishReason(response.IncompleteDetails.Reason, false)
1367
1368	if err != nil {
1369		// Add usage info to error
1370		if nogErr, ok := err.(*fantasy.NoObjectGeneratedError); ok {
1371			nogErr.Usage = usage
1372			nogErr.FinishReason = finishReason
1373		}
1374		return nil, err
1375	}
1376
1377	return &fantasy.ObjectResponse{
1378		Object:           obj,
1379		RawText:          jsonText,
1380		Usage:            usage,
1381		FinishReason:     finishReason,
1382		Warnings:         warnings,
1383		ProviderMetadata: responsesProviderMetadata(response.ID),
1384	}, nil
1385}
1386
// streamObjectWithJSONMode streams a structured object using the provider's
// native JSON-schema output. As text deltas arrive, it incrementally parses
// the accumulated JSON and emits an object part whenever a valid,
// schema-conforming object differs from the last one emitted.
func (o responsesLanguageModel) streamObjectWithJSONMode(ctx context.Context, call fantasy.ObjectCall) (fantasy.ObjectStreamResponse, error) {
	// Convert our Schema to OpenAI's JSON Schema format
	jsonSchemaMap := schema.ToMap(call.Schema)

	// Add additionalProperties: false recursively for strict mode (OpenAI requirement)
	addAdditionalPropertiesFalse(jsonSchemaMap)

	schemaName := call.SchemaName
	if schemaName == "" {
		schemaName = "response"
	}

	// Build request using prepareParams
	fantasyCall := fantasy.Call{
		Prompt:           call.Prompt,
		MaxOutputTokens:  call.MaxOutputTokens,
		Temperature:      call.Temperature,
		TopP:             call.TopP,
		PresencePenalty:  call.PresencePenalty,
		FrequencyPenalty: call.FrequencyPenalty,
		ProviderOptions:  call.ProviderOptions,
	}

	params, warnings, err := o.prepareParams(fantasyCall)
	if err != nil {
		return nil, err
	}

	// Add structured output via Text.Format field
	params.Text = responses.ResponseTextConfigParam{
		Format: responses.ResponseFormatTextConfigParamOfJSONSchema(schemaName, jsonSchemaMap),
	}

	stream := o.client.Responses.NewStreaming(ctx, *params, objectCallUARequestOptions(call, o.noDefaultUserAgent)...)

	return func(yield func(fantasy.ObjectStreamPart) bool) {
		// NOTE(review): warnings are emitted under the Object part type here,
		// unlike Stream which has a dedicated warnings part type — confirm
		// there is no ObjectStreamPartTypeWarnings equivalent.
		if len(warnings) > 0 {
			if !yield(fantasy.ObjectStreamPart{
				Type:     fantasy.ObjectStreamPartTypeObject,
				Warnings: warnings,
			}) {
				return
			}
		}

		var accumulated string
		var lastParsedObject any
		var usage fantasy.Usage
		var finishReason fantasy.FinishReason
		// responseID tracks the server-assigned response ID. It's first set from the
		// response.created event and may be overwritten by response.completed or
		// response.incomplete events. Per the OpenAI API contract, these IDs are
		// identical; the overwrites ensure we have the final value even if an event
		// is missed.
		var responseID string
		var streamErr error
		// hasFunctionCall is never set to true in JSON mode (no tools are
		// sent), so the finish reason mapping always sees false here.
		hasFunctionCall := false

		for stream.Next() {
			event := stream.Current()

			switch event.Type {
			case "response.created":
				created := event.AsResponseCreated()
				responseID = created.Response.ID

			case "response.output_text.delta":
				textDelta := event.AsResponseOutputTextDelta()
				accumulated += textDelta.Delta

				// Try to parse the accumulated text
				obj, state, parseErr := schema.ParsePartialJSON(accumulated)

				// If we successfully parsed, validate and emit
				if state == schema.ParseStateSuccessful || state == schema.ParseStateRepaired {
					if err := schema.ValidateAgainstSchema(obj, call.Schema); err == nil {
						// Only emit if object is different from last
						if !reflect.DeepEqual(obj, lastParsedObject) {
							if !yield(fantasy.ObjectStreamPart{
								Type:   fantasy.ObjectStreamPartTypeObject,
								Object: obj,
							}) {
								return
							}
							lastParsedObject = obj
						}
					}
				}

				// If parsing failed and we have a repair function, try it
				if state == schema.ParseStateFailed && call.RepairText != nil {
					repairedText, repairErr := call.RepairText(ctx, accumulated, parseErr)
					if repairErr == nil {
						obj2, state2, _ := schema.ParsePartialJSON(repairedText)
						if (state2 == schema.ParseStateSuccessful || state2 == schema.ParseStateRepaired) &&
							schema.ValidateAgainstSchema(obj2, call.Schema) == nil {
							if !reflect.DeepEqual(obj2, lastParsedObject) {
								if !yield(fantasy.ObjectStreamPart{
									Type:   fantasy.ObjectStreamPartTypeObject,
									Object: obj2,
								}) {
									return
								}
								lastParsedObject = obj2
							}
						}
					}
				}

			case "response.completed":
				completed := event.AsResponseCompleted()
				responseID = completed.Response.ID
				finishReason = mapResponsesFinishReason(completed.Response.IncompleteDetails.Reason, hasFunctionCall)
				usage = responsesUsage(completed.Response)

			case "response.incomplete":
				incomplete := event.AsResponseIncomplete()
				responseID = incomplete.Response.ID
				finishReason = mapResponsesFinishReason(incomplete.Response.IncompleteDetails.Reason, hasFunctionCall)
				usage = responsesUsage(incomplete.Response)

			case "error":
				errorEvent := event.AsError()
				streamErr = fmt.Errorf("response error: %s (code: %s)", errorEvent.Message, errorEvent.Code)
				if !yield(fantasy.ObjectStreamPart{
					Type:  fantasy.ObjectStreamPartTypeError,
					Error: streamErr,
				}) {
					return
				}
				return
			}
		}

		// Transport-level stream failure: surface it and stop.
		err := stream.Err()
		if err != nil {
			yield(fantasy.ObjectStreamPart{
				Type:  fantasy.ObjectStreamPartTypeError,
				Error: toProviderErr(err),
			})
			return
		}

		// Final validation and emit
		if streamErr == nil && lastParsedObject != nil {
			yield(fantasy.ObjectStreamPart{
				Type:             fantasy.ObjectStreamPartTypeFinish,
				Usage:            usage,
				FinishReason:     finishReason,
				ProviderMetadata: responsesProviderMetadata(responseID),
			})
		} else if streamErr == nil && lastParsedObject == nil {
			// No object was generated
			yield(fantasy.ObjectStreamPart{
				Type: fantasy.ObjectStreamPartTypeError,
				Error: &fantasy.NoObjectGeneratedError{
					RawText:      accumulated,
					ParseError:   fmt.Errorf("no valid object generated in stream"),
					Usage:        usage,
					FinishReason: finishReason,
				},
			})
		}
	}, nil
}