responses_language_model.go

   1package openai
   2
   3import (
   4	"context"
   5	"encoding/base64"
   6	"encoding/json"
   7	"fmt"
   8	"reflect"
   9	"strings"
  10
  11	"charm.land/fantasy"
  12	"charm.land/fantasy/object"
  13	"charm.land/fantasy/schema"
  14	"github.com/google/uuid"
  15	"github.com/openai/openai-go/v2"
  16	"github.com/openai/openai-go/v2/packages/param"
  17	"github.com/openai/openai-go/v2/responses"
  18	"github.com/openai/openai-go/v2/shared"
  19)
  20
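// topLogprobsMax is the top_logprobs value requested when the Logprobs provider
// option is simply set to true rather than to an explicit count.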
  21const topLogprobsMax = 20
  22
  23type responsesLanguageModel struct {
  24	provider   string
  25	modelID    string
  26	client     openai.Client
  27	objectMode fantasy.ObjectMode
  28}
  29
   30// newResponsesLanguageModel creates a language model backed by the OpenAI Responses API.
   31// INFO: (kujtim) the store parameter is not supported yet; we always default it to false.
  32func newResponsesLanguageModel(modelID string, provider string, client openai.Client, objectMode fantasy.ObjectMode) responsesLanguageModel {
  33	return responsesLanguageModel{
  34		modelID:    modelID,
  35		provider:   provider,
  36		client:     client,
  37		objectMode: objectMode,
  38	}
  39}
  40
  41func (o responsesLanguageModel) Model() string {
  42	return o.modelID
  43}
  44
  45func (o responsesLanguageModel) Provider() string {
  46	return o.provider
  47}
  48
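// responsesModelConfig captures per-model behavior: whether the model is a reasoning
// model, how system messages should be delivered, and which service tiers it supports.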
  49type responsesModelConfig struct {
  50	isReasoningModel           bool
  51	systemMessageMode          string
  52	requiredAutoTruncation     bool
  53	supportsFlexProcessing     bool
  54	supportsPriorityProcessing bool
  55}
  56
  57func getResponsesModelConfig(modelID string) responsesModelConfig {
  58	supportsFlexProcessing := strings.HasPrefix(modelID, "o3") ||
  59		strings.Contains(modelID, "-o3") || strings.Contains(modelID, "o4-mini") ||
  60		(strings.Contains(modelID, "gpt-5") && !strings.Contains(modelID, "gpt-5-chat"))
  61
  62	supportsPriorityProcessing := strings.Contains(modelID, "gpt-4") ||
  63		strings.Contains(modelID, "gpt-5-mini") ||
  64		(strings.Contains(modelID, "gpt-5") &&
  65			!strings.Contains(modelID, "gpt-5-nano") &&
  66			!strings.Contains(modelID, "gpt-5-chat")) ||
  67		strings.HasPrefix(modelID, "o3") ||
  68		strings.Contains(modelID, "-o3") ||
  69		strings.Contains(modelID, "o4-mini")
  70
  71	defaults := responsesModelConfig{
  72		requiredAutoTruncation:     false,
  73		systemMessageMode:          "system",
  74		supportsFlexProcessing:     supportsFlexProcessing,
  75		supportsPriorityProcessing: supportsPriorityProcessing,
  76	}
  77
  78	if strings.Contains(modelID, "gpt-5-chat") {
  79		return responsesModelConfig{
  80			isReasoningModel:           false,
  81			systemMessageMode:          defaults.systemMessageMode,
  82			requiredAutoTruncation:     defaults.requiredAutoTruncation,
  83			supportsFlexProcessing:     defaults.supportsFlexProcessing,
  84			supportsPriorityProcessing: defaults.supportsPriorityProcessing,
  85		}
  86	}
  87
  88	if strings.HasPrefix(modelID, "o1") || strings.Contains(modelID, "-o1") ||
  89		strings.HasPrefix(modelID, "o3") || strings.Contains(modelID, "-o3") ||
  90		strings.HasPrefix(modelID, "o4") || strings.Contains(modelID, "-o4") ||
  91		strings.HasPrefix(modelID, "oss") || strings.Contains(modelID, "-oss") ||
  92		strings.Contains(modelID, "gpt-5") || strings.Contains(modelID, "codex-") ||
  93		strings.Contains(modelID, "computer-use") {
  94		if strings.Contains(modelID, "o1-mini") || strings.Contains(modelID, "o1-preview") {
  95			return responsesModelConfig{
  96				isReasoningModel:           true,
  97				systemMessageMode:          "remove",
  98				requiredAutoTruncation:     defaults.requiredAutoTruncation,
  99				supportsFlexProcessing:     defaults.supportsFlexProcessing,
 100				supportsPriorityProcessing: defaults.supportsPriorityProcessing,
 101			}
 102		}
 103
 104		return responsesModelConfig{
 105			isReasoningModel:           true,
 106			systemMessageMode:          "developer",
 107			requiredAutoTruncation:     defaults.requiredAutoTruncation,
 108			supportsFlexProcessing:     defaults.supportsFlexProcessing,
 109			supportsPriorityProcessing: defaults.supportsPriorityProcessing,
 110		}
 111	}
 112
 113	return responsesModelConfig{
 114		isReasoningModel:           false,
 115		systemMessageMode:          defaults.systemMessageMode,
 116		requiredAutoTruncation:     defaults.requiredAutoTruncation,
 117		supportsFlexProcessing:     defaults.supportsFlexProcessing,
 118		supportsPriorityProcessing: defaults.supportsPriorityProcessing,
 119	}
 120}
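// For illustration, the string matching above classifies a few common IDs as follows
// (derived from the checks in this function, not an exhaustive list):
//
//	"o4-mini"    -> reasoning, "developer" system messages, flex and priority tiers
//	"gpt-5-chat" -> non-reasoning, "system" system messages, neither tier
//	"gpt-4o"     -> non-reasoning, "system" system messages, priority tier only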
 121
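// prepareParams translates a fantasy.Call into Responses API request parameters,
// collecting warnings for any settings the target model does not support.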
 122func (o responsesLanguageModel) prepareParams(call fantasy.Call) (*responses.ResponseNewParams, []fantasy.CallWarning) {
 123	var warnings []fantasy.CallWarning
 124	params := &responses.ResponseNewParams{
 125		Store: param.NewOpt(false),
 126	}
 127
 128	modelConfig := getResponsesModelConfig(o.modelID)
 129
 130	if call.TopK != nil {
 131		warnings = append(warnings, fantasy.CallWarning{
 132			Type:    fantasy.CallWarningTypeUnsupportedSetting,
 133			Setting: "topK",
 134		})
 135	}
 136
 137	if call.PresencePenalty != nil {
 138		warnings = append(warnings, fantasy.CallWarning{
 139			Type:    fantasy.CallWarningTypeUnsupportedSetting,
 140			Setting: "presencePenalty",
 141		})
 142	}
 143
 144	if call.FrequencyPenalty != nil {
 145		warnings = append(warnings, fantasy.CallWarning{
 146			Type:    fantasy.CallWarningTypeUnsupportedSetting,
 147			Setting: "frequencyPenalty",
 148		})
 149	}
 150
 151	var openaiOptions *ResponsesProviderOptions
 152	if opts, ok := call.ProviderOptions[Name]; ok {
 153		if typedOpts, ok := opts.(*ResponsesProviderOptions); ok {
 154			openaiOptions = typedOpts
 155		}
 156	}
 157
 158	input, inputWarnings := toResponsesPrompt(call.Prompt, modelConfig.systemMessageMode)
 159	warnings = append(warnings, inputWarnings...)
 160
 161	var include []IncludeType
 162
 163	addInclude := func(key IncludeType) {
 164		include = append(include, key)
 165	}
 166
 167	topLogprobs := 0
 168	if openaiOptions != nil && openaiOptions.Logprobs != nil {
 169		switch v := openaiOptions.Logprobs.(type) {
 170		case bool:
 171			if v {
 172				topLogprobs = topLogprobsMax
 173			}
 174		case float64:
 175			topLogprobs = int(v)
 176		case int:
 177			topLogprobs = v
 178		}
 179	}
 180
 181	if topLogprobs > 0 {
 182		addInclude(IncludeMessageOutputTextLogprobs)
 183	}
 184
 185	params.Model = o.modelID
 186	params.Input = responses.ResponseNewParamsInputUnion{
 187		OfInputItemList: input,
 188	}
 189
 190	if call.Temperature != nil {
 191		params.Temperature = param.NewOpt(*call.Temperature)
 192	}
 193	if call.TopP != nil {
 194		params.TopP = param.NewOpt(*call.TopP)
 195	}
 196	if call.MaxOutputTokens != nil {
 197		params.MaxOutputTokens = param.NewOpt(*call.MaxOutputTokens)
 198	}
 199
 200	if openaiOptions != nil {
 201		if openaiOptions.MaxToolCalls != nil {
 202			params.MaxToolCalls = param.NewOpt(*openaiOptions.MaxToolCalls)
 203		}
 204		if openaiOptions.Metadata != nil {
 205			metadata := make(shared.Metadata)
 206			for k, v := range openaiOptions.Metadata {
 207				if str, ok := v.(string); ok {
 208					metadata[k] = str
 209				}
 210			}
 211			params.Metadata = metadata
 212		}
 213		if openaiOptions.ParallelToolCalls != nil {
 214			params.ParallelToolCalls = param.NewOpt(*openaiOptions.ParallelToolCalls)
 215		}
 216		if openaiOptions.User != nil {
 217			params.User = param.NewOpt(*openaiOptions.User)
 218		}
 219		if openaiOptions.Instructions != nil {
 220			params.Instructions = param.NewOpt(*openaiOptions.Instructions)
 221		}
 222		if openaiOptions.ServiceTier != nil {
 223			params.ServiceTier = responses.ResponseNewParamsServiceTier(*openaiOptions.ServiceTier)
 224		}
 225		if openaiOptions.PromptCacheKey != nil {
 226			params.PromptCacheKey = param.NewOpt(*openaiOptions.PromptCacheKey)
 227		}
 228		if openaiOptions.SafetyIdentifier != nil {
 229			params.SafetyIdentifier = param.NewOpt(*openaiOptions.SafetyIdentifier)
 230		}
 231		if topLogprobs > 0 {
 232			params.TopLogprobs = param.NewOpt(int64(topLogprobs))
 233		}
 234
 235		if len(openaiOptions.Include) > 0 {
 236			include = append(include, openaiOptions.Include...)
 237		}
 238
 239		if modelConfig.isReasoningModel && (openaiOptions.ReasoningEffort != nil || openaiOptions.ReasoningSummary != nil) {
 240			reasoning := shared.ReasoningParam{}
 241			if openaiOptions.ReasoningEffort != nil {
 242				reasoning.Effort = shared.ReasoningEffort(*openaiOptions.ReasoningEffort)
 243			}
 244			if openaiOptions.ReasoningSummary != nil {
 245				reasoning.Summary = shared.ReasoningSummary(*openaiOptions.ReasoningSummary)
 246			}
 247			params.Reasoning = reasoning
 248		}
 249	}
 250
 251	if modelConfig.requiredAutoTruncation {
 252		params.Truncation = responses.ResponseNewParamsTruncationAuto
 253	}
 254
 255	if len(include) > 0 {
 256		includeParams := make([]responses.ResponseIncludable, len(include))
 257		for i, inc := range include {
 258			includeParams[i] = responses.ResponseIncludable(string(inc))
 259		}
 260		params.Include = includeParams
 261	}
 262
 263	if modelConfig.isReasoningModel {
 264		if call.Temperature != nil {
 265			params.Temperature = param.Opt[float64]{}
 266			warnings = append(warnings, fantasy.CallWarning{
 267				Type:    fantasy.CallWarningTypeUnsupportedSetting,
 268				Setting: "temperature",
 269				Details: "temperature is not supported for reasoning models",
 270			})
 271		}
 272
 273		if call.TopP != nil {
 274			params.TopP = param.Opt[float64]{}
 275			warnings = append(warnings, fantasy.CallWarning{
 276				Type:    fantasy.CallWarningTypeUnsupportedSetting,
 277				Setting: "topP",
 278				Details: "topP is not supported for reasoning models",
 279			})
 280		}
 281	} else {
 282		if openaiOptions != nil {
 283			if openaiOptions.ReasoningEffort != nil {
 284				warnings = append(warnings, fantasy.CallWarning{
 285					Type:    fantasy.CallWarningTypeUnsupportedSetting,
 286					Setting: "reasoningEffort",
 287					Details: "reasoningEffort is not supported for non-reasoning models",
 288				})
 289			}
 290
 291			if openaiOptions.ReasoningSummary != nil {
 292				warnings = append(warnings, fantasy.CallWarning{
 293					Type:    fantasy.CallWarningTypeUnsupportedSetting,
 294					Setting: "reasoningSummary",
 295					Details: "reasoningSummary is not supported for non-reasoning models",
 296				})
 297			}
 298		}
 299	}
 300
 301	if openaiOptions != nil && openaiOptions.ServiceTier != nil {
 302		if *openaiOptions.ServiceTier == ServiceTierFlex && !modelConfig.supportsFlexProcessing {
 303			warnings = append(warnings, fantasy.CallWarning{
 304				Type:    fantasy.CallWarningTypeUnsupportedSetting,
 305				Setting: "serviceTier",
 306				Details: "flex processing is only available for o3, o4-mini, and gpt-5 models",
 307			})
 308			params.ServiceTier = ""
 309		}
 310
 311		if *openaiOptions.ServiceTier == ServiceTierPriority && !modelConfig.supportsPriorityProcessing {
 312			warnings = append(warnings, fantasy.CallWarning{
 313				Type:    fantasy.CallWarningTypeUnsupportedSetting,
 314				Setting: "serviceTier",
 315				Details: "priority processing is only available for supported models (gpt-4, gpt-5, gpt-5-mini, o3, o4-mini) and requires Enterprise access. gpt-5-nano is not supported",
 316			})
 317			params.ServiceTier = ""
 318		}
 319	}
 320
 321	tools, toolChoice, toolWarnings := toResponsesTools(call.Tools, call.ToolChoice, openaiOptions)
 322	warnings = append(warnings, toolWarnings...)
 323
 324	if len(tools) > 0 {
 325		params.Tools = tools
 326		params.ToolChoice = toolChoice
 327	}
 328
 329	return params, warnings
 330}
 331
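// toResponsesPrompt converts a fantasy prompt into Responses API input items.
// systemMessageMode controls whether system messages are sent with the "system"
// role, the "developer" role, or removed entirely.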
 332func toResponsesPrompt(prompt fantasy.Prompt, systemMessageMode string) (responses.ResponseInputParam, []fantasy.CallWarning) {
 333	var input responses.ResponseInputParam
 334	var warnings []fantasy.CallWarning
 335
 336	for _, msg := range prompt {
 337		switch msg.Role {
 338		case fantasy.MessageRoleSystem:
 339			var systemText string
 340			for _, c := range msg.Content {
 341				if c.GetType() != fantasy.ContentTypeText {
 342					warnings = append(warnings, fantasy.CallWarning{
 343						Type:    fantasy.CallWarningTypeOther,
 344						Message: "system prompt can only have text content",
 345					})
 346					continue
 347				}
 348				textPart, ok := fantasy.AsContentType[fantasy.TextPart](c)
 349				if !ok {
 350					warnings = append(warnings, fantasy.CallWarning{
 351						Type:    fantasy.CallWarningTypeOther,
 352						Message: "system prompt text part does not have the right type",
 353					})
 354					continue
 355				}
 356				if strings.TrimSpace(textPart.Text) != "" {
 357					systemText += textPart.Text
 358				}
 359			}
 360
 361			if systemText == "" {
 362				warnings = append(warnings, fantasy.CallWarning{
 363					Type:    fantasy.CallWarningTypeOther,
 364					Message: "system prompt has no text parts",
 365				})
 366				continue
 367			}
 368
 369			switch systemMessageMode {
 370			case "system":
 371				input = append(input, responses.ResponseInputItemParamOfMessage(systemText, responses.EasyInputMessageRoleSystem))
 372			case "developer":
 373				input = append(input, responses.ResponseInputItemParamOfMessage(systemText, responses.EasyInputMessageRoleDeveloper))
 374			case "remove":
 375				warnings = append(warnings, fantasy.CallWarning{
 376					Type:    fantasy.CallWarningTypeOther,
 377					Message: "system messages are removed for this model",
 378				})
 379			}
 380
 381		case fantasy.MessageRoleUser:
 382			var contentParts responses.ResponseInputMessageContentListParam
 383			for i, c := range msg.Content {
 384				switch c.GetType() {
 385				case fantasy.ContentTypeText:
 386					textPart, ok := fantasy.AsContentType[fantasy.TextPart](c)
 387					if !ok {
 388						warnings = append(warnings, fantasy.CallWarning{
 389							Type:    fantasy.CallWarningTypeOther,
 390							Message: "user message text part does not have the right type",
 391						})
 392						continue
 393					}
 394					contentParts = append(contentParts, responses.ResponseInputContentUnionParam{
 395						OfInputText: &responses.ResponseInputTextParam{
 396							Type: "input_text",
 397							Text: textPart.Text,
 398						},
 399					})
 400
 401				case fantasy.ContentTypeFile:
 402					filePart, ok := fantasy.AsContentType[fantasy.FilePart](c)
 403					if !ok {
 404						warnings = append(warnings, fantasy.CallWarning{
 405							Type:    fantasy.CallWarningTypeOther,
 406							Message: "user message file part does not have the right type",
 407						})
 408						continue
 409					}
 410
 411					if strings.HasPrefix(filePart.MediaType, "image/") {
 412						base64Encoded := base64.StdEncoding.EncodeToString(filePart.Data)
 413						imageURL := fmt.Sprintf("data:%s;base64,%s", filePart.MediaType, base64Encoded)
 414						contentParts = append(contentParts, responses.ResponseInputContentUnionParam{
 415							OfInputImage: &responses.ResponseInputImageParam{
 416								Type:     "input_image",
 417								ImageURL: param.NewOpt(imageURL),
 418							},
 419						})
 420					} else if filePart.MediaType == "application/pdf" {
 421						base64Encoded := base64.StdEncoding.EncodeToString(filePart.Data)
 422						fileData := fmt.Sprintf("data:application/pdf;base64,%s", base64Encoded)
 423						filename := filePart.Filename
 424						if filename == "" {
 425							filename = fmt.Sprintf("part-%d.pdf", i)
 426						}
 427						contentParts = append(contentParts, responses.ResponseInputContentUnionParam{
 428							OfInputFile: &responses.ResponseInputFileParam{
 429								Type:     "input_file",
 430								Filename: param.NewOpt(filename),
 431								FileData: param.NewOpt(fileData),
 432							},
 433						})
 434					} else {
 435						warnings = append(warnings, fantasy.CallWarning{
 436							Type:    fantasy.CallWarningTypeOther,
 437							Message: fmt.Sprintf("file part media type %s not supported", filePart.MediaType),
 438						})
 439					}
 440				}
 441			}
 442
 443			input = append(input, responses.ResponseInputItemParamOfMessage(contentParts, responses.EasyInputMessageRoleUser))
 444
 445		case fantasy.MessageRoleAssistant:
 446			for _, c := range msg.Content {
 447				switch c.GetType() {
 448				case fantasy.ContentTypeText:
 449					textPart, ok := fantasy.AsContentType[fantasy.TextPart](c)
 450					if !ok {
 451						warnings = append(warnings, fantasy.CallWarning{
 452							Type:    fantasy.CallWarningTypeOther,
 453							Message: "assistant message text part does not have the right type",
 454						})
 455						continue
 456					}
 457					input = append(input, responses.ResponseInputItemParamOfMessage(textPart.Text, responses.EasyInputMessageRoleAssistant))
 458
 459				case fantasy.ContentTypeToolCall:
 460					toolCallPart, ok := fantasy.AsContentType[fantasy.ToolCallPart](c)
 461					if !ok {
 462						warnings = append(warnings, fantasy.CallWarning{
 463							Type:    fantasy.CallWarningTypeOther,
 464							Message: "assistant message tool call part does not have the right type",
 465						})
 466						continue
 467					}
 468
 469					if toolCallPart.ProviderExecuted {
 470						continue
 471					}
 472
 473					inputJSON, err := json.Marshal(toolCallPart.Input)
 474					if err != nil {
 475						warnings = append(warnings, fantasy.CallWarning{
 476							Type:    fantasy.CallWarningTypeOther,
 477							Message: fmt.Sprintf("failed to marshal tool call input: %v", err),
 478						})
 479						continue
 480					}
 481
 482					input = append(input, responses.ResponseInputItemParamOfFunctionCall(string(inputJSON), toolCallPart.ToolCallID, toolCallPart.ToolName))
 483				case fantasy.ContentTypeReasoning:
 484					reasoningMetadata := GetReasoningMetadata(c.Options())
 485					if reasoningMetadata == nil || reasoningMetadata.ItemID == "" {
 486						continue
 487					}
 488					if len(reasoningMetadata.Summary) == 0 && reasoningMetadata.EncryptedContent == nil {
 489						warnings = append(warnings, fantasy.CallWarning{
 490							Type:    fantasy.CallWarningTypeOther,
 491							Message: "assistant message reasoning part is empty",
 492						})
 493						continue
 494					}
 495					// always send a summary slice (never nil) so it serializes as an empty JSON array
 496					summary := []responses.ResponseReasoningItemSummaryParam{}
 497					for _, s := range reasoningMetadata.Summary {
 498						summary = append(summary, responses.ResponseReasoningItemSummaryParam{
 499							Type: "summary_text",
 500							Text: s,
 501						})
 502					}
 503					reasoning := &responses.ResponseReasoningItemParam{
 504						ID:      reasoningMetadata.ItemID,
 505						Summary: summary,
 506					}
 507					if reasoningMetadata.EncryptedContent != nil {
 508						reasoning.EncryptedContent = param.NewOpt(*reasoningMetadata.EncryptedContent)
 509					}
 510					input = append(input, responses.ResponseInputItemUnionParam{
 511						OfReasoning: reasoning,
 512					})
 513				}
 514			}
 515
 516		case fantasy.MessageRoleTool:
 517			for _, c := range msg.Content {
 518				if c.GetType() != fantasy.ContentTypeToolResult {
 519					warnings = append(warnings, fantasy.CallWarning{
 520						Type:    fantasy.CallWarningTypeOther,
 521						Message: "tool message can only have tool result content",
 522					})
 523					continue
 524				}
 525
 526				toolResultPart, ok := fantasy.AsContentType[fantasy.ToolResultPart](c)
 527				if !ok {
 528					warnings = append(warnings, fantasy.CallWarning{
 529						Type:    fantasy.CallWarningTypeOther,
 530						Message: "tool message result part does not have the right type",
 531					})
 532					continue
 533				}
 534
 535				var outputStr string
 536				switch toolResultPart.Output.GetType() {
 537				case fantasy.ToolResultContentTypeText:
 538					output, ok := fantasy.AsToolResultOutputType[fantasy.ToolResultOutputContentText](toolResultPart.Output)
 539					if !ok {
 540						warnings = append(warnings, fantasy.CallWarning{
 541							Type:    fantasy.CallWarningTypeOther,
 542							Message: "tool result output does not have the right type",
 543						})
 544						continue
 545					}
 546					outputStr = output.Text
 547				case fantasy.ToolResultContentTypeError:
 548					output, ok := fantasy.AsToolResultOutputType[fantasy.ToolResultOutputContentError](toolResultPart.Output)
 549					if !ok {
 550						warnings = append(warnings, fantasy.CallWarning{
 551							Type:    fantasy.CallWarningTypeOther,
 552							Message: "tool result output does not have the right type",
 553						})
 554						continue
 555					}
 556					outputStr = output.Error.Error()
 557				}
 558
 559				input = append(input, responses.ResponseInputItemParamOfFunctionCallOutput(toolResultPart.ToolCallID, outputStr))
 560			}
 561		}
 562	}
 563
 564	return input, warnings
 565}
 566
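// toResponsesTools maps fantasy tools and the tool choice onto Responses API tool
// parameters; unsupported tool types are reported as warnings instead of failing.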
 567func toResponsesTools(tools []fantasy.Tool, toolChoice *fantasy.ToolChoice, options *ResponsesProviderOptions) ([]responses.ToolUnionParam, responses.ResponseNewParamsToolChoiceUnion, []fantasy.CallWarning) {
 568	warnings := make([]fantasy.CallWarning, 0)
 569	var openaiTools []responses.ToolUnionParam
 570
 571	if len(tools) == 0 {
 572		return nil, responses.ResponseNewParamsToolChoiceUnion{}, nil
 573	}
 574
 575	strictJSONSchema := false
 576	if options != nil && options.StrictJSONSchema != nil {
 577		strictJSONSchema = *options.StrictJSONSchema
 578	}
 579
 580	for _, tool := range tools {
 581		if tool.GetType() == fantasy.ToolTypeFunction {
 582			ft, ok := tool.(fantasy.FunctionTool)
 583			if !ok {
 584				continue
 585			}
 586			openaiTools = append(openaiTools, responses.ToolUnionParam{
 587				OfFunction: &responses.FunctionToolParam{
 588					Name:        ft.Name,
 589					Description: param.NewOpt(ft.Description),
 590					Parameters:  ft.InputSchema,
 591					Strict:      param.NewOpt(strictJSONSchema),
 592					Type:        "function",
 593				},
 594			})
 595			continue
 596		}
 597
 598		warnings = append(warnings, fantasy.CallWarning{
 599			Type:    fantasy.CallWarningTypeUnsupportedTool,
 600			Tool:    tool,
 601			Message: "tool is not supported",
 602		})
 603	}
 604
 605	if toolChoice == nil {
 606		return openaiTools, responses.ResponseNewParamsToolChoiceUnion{}, warnings
 607	}
 608
 609	var openaiToolChoice responses.ResponseNewParamsToolChoiceUnion
 610
 611	switch *toolChoice {
 612	case fantasy.ToolChoiceAuto:
 613		openaiToolChoice = responses.ResponseNewParamsToolChoiceUnion{
 614			OfToolChoiceMode: param.NewOpt(responses.ToolChoiceOptionsAuto),
 615		}
 616	case fantasy.ToolChoiceNone:
 617		openaiToolChoice = responses.ResponseNewParamsToolChoiceUnion{
 618			OfToolChoiceMode: param.NewOpt(responses.ToolChoiceOptionsNone),
 619		}
 620	case fantasy.ToolChoiceRequired:
 621		openaiToolChoice = responses.ResponseNewParamsToolChoiceUnion{
 622			OfToolChoiceMode: param.NewOpt(responses.ToolChoiceOptionsRequired),
 623		}
 624	default:
 625		openaiToolChoice = responses.ResponseNewParamsToolChoiceUnion{
 626			OfFunctionTool: &responses.ToolChoiceFunctionParam{
 627				Type: "function",
 628				Name: string(*toolChoice),
 629			},
 630		}
 631	}
 632
 633	return openaiTools, openaiToolChoice, warnings
 634}
 635
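// Generate performs a single, non-streaming Responses API call and maps the output
// items (messages, function calls, reasoning) to fantasy content.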
 636func (o responsesLanguageModel) Generate(ctx context.Context, call fantasy.Call) (*fantasy.Response, error) {
 637	params, warnings := o.prepareParams(call)
 638	response, err := o.client.Responses.New(ctx, *params)
 639	if err != nil {
 640		return nil, toProviderErr(err)
 641	}
 642
 643	if response.Error.Message != "" {
 644		return nil, &fantasy.Error{
 645			Title:   "provider error",
 646			Message: fmt.Sprintf("%s (code: %s)", response.Error.Message, response.Error.Code),
 647		}
 648	}
 649
 650	var content []fantasy.Content
 651	hasFunctionCall := false
 652
 653	for _, outputItem := range response.Output {
 654		switch outputItem.Type {
 655		case "message":
 656			for _, contentPart := range outputItem.Content {
 657				if contentPart.Type == "output_text" {
 658					content = append(content, fantasy.TextContent{
 659						Text: contentPart.Text,
 660					})
 661
 662					for _, annotation := range contentPart.Annotations {
 663						switch annotation.Type {
 664						case "url_citation":
 665							content = append(content, fantasy.SourceContent{
 666								SourceType: fantasy.SourceTypeURL,
 667								ID:         uuid.NewString(),
 668								URL:        annotation.URL,
 669								Title:      annotation.Title,
 670							})
 671						case "file_citation":
 672							title := "Document"
 673							if annotation.Filename != "" {
 674								title = annotation.Filename
 675							}
 676							filename := annotation.Filename
 677							if filename == "" {
 678								filename = annotation.FileID
 679							}
 680							content = append(content, fantasy.SourceContent{
 681								SourceType: fantasy.SourceTypeDocument,
 682								ID:         uuid.NewString(),
 683								MediaType:  "text/plain",
 684								Title:      title,
 685								Filename:   filename,
 686							})
 687						}
 688					}
 689				}
 690			}
 691
 692		case "function_call":
 693			hasFunctionCall = true
 694			content = append(content, fantasy.ToolCallContent{
 695				ProviderExecuted: false,
 696				ToolCallID:       outputItem.CallID,
 697				ToolName:         outputItem.Name,
 698				Input:            outputItem.Arguments,
 699			})
 700
 701		case "reasoning":
 702			metadata := &ResponsesReasoningMetadata{
 703				ItemID: outputItem.ID,
 704			}
 705			if outputItem.EncryptedContent != "" {
 706				metadata.EncryptedContent = &outputItem.EncryptedContent
 707			}
 708
 709			if len(outputItem.Summary) == 0 && metadata.EncryptedContent == nil {
 710				continue
 711			}
 712
 713			// When there are no summary parts, fall back to a single empty summary so a reasoning part is still emitted
 714			summaries := outputItem.Summary
 715			if len(summaries) == 0 {
 716				summaries = []responses.ResponseReasoningItemSummary{{Type: "summary_text", Text: ""}}
 717			}
 718
 719			for _, s := range summaries {
 720				metadata.Summary = append(metadata.Summary, s.Text)
 721			}
 722
 723			content = append(content, fantasy.ReasoningContent{
 724				Text: strings.Join(metadata.Summary, "\n"),
 725				ProviderMetadata: fantasy.ProviderMetadata{
 726					Name: metadata,
 727				},
 728			})
 729		}
 730	}
 731
 732	usage := fantasy.Usage{
 733		InputTokens:  response.Usage.InputTokens,
 734		OutputTokens: response.Usage.OutputTokens,
 735		TotalTokens:  response.Usage.InputTokens + response.Usage.OutputTokens,
 736	}
 737
 738	if response.Usage.OutputTokensDetails.ReasoningTokens != 0 {
 739		usage.ReasoningTokens = response.Usage.OutputTokensDetails.ReasoningTokens
 740	}
 741	if response.Usage.InputTokensDetails.CachedTokens != 0 {
 742		usage.CacheReadTokens = response.Usage.InputTokensDetails.CachedTokens
 743	}
 744
 745	finishReason := mapResponsesFinishReason(response.IncompleteDetails.Reason, hasFunctionCall)
 746
 747	return &fantasy.Response{
 748		Content:          content,
 749		Usage:            usage,
 750		FinishReason:     finishReason,
 751		ProviderMetadata: fantasy.ProviderMetadata{},
 752		Warnings:         warnings,
 753	}, nil
 754}
 755
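// mapResponsesFinishReason maps the incomplete_details reason reported by the API
// onto a fantasy.FinishReason; an observed function call always takes precedence.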
 756func mapResponsesFinishReason(reason string, hasFunctionCall bool) fantasy.FinishReason {
 757	if hasFunctionCall {
 758		return fantasy.FinishReasonToolCalls
 759	}
 760
 761	switch reason {
 762	case "":
 763		return fantasy.FinishReasonStop
 764	case "max_tokens", "max_output_tokens":
 765		return fantasy.FinishReasonLength
 766	case "content_filter":
 767		return fantasy.FinishReasonContentFilter
 768	default:
 769		return fantasy.FinishReasonOther
 770	}
 771}
 772
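// Stream performs a streaming Responses API call and adapts the event stream into
// fantasy stream parts, tracking in-flight tool calls and reasoning items as it goes.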
 773func (o responsesLanguageModel) Stream(ctx context.Context, call fantasy.Call) (fantasy.StreamResponse, error) {
 774	params, warnings := o.prepareParams(call)
 775
 776	stream := o.client.Responses.NewStreaming(ctx, *params)
 777
 778	finishReason := fantasy.FinishReasonUnknown
 779	var usage fantasy.Usage
 780	ongoingToolCalls := make(map[int64]*ongoingToolCall)
 781	hasFunctionCall := false
 782	activeReasoning := make(map[string]*reasoningState)
 783
 784	return func(yield func(fantasy.StreamPart) bool) {
 785		if len(warnings) > 0 {
 786			if !yield(fantasy.StreamPart{
 787				Type:     fantasy.StreamPartTypeWarnings,
 788				Warnings: warnings,
 789			}) {
 790				return
 791			}
 792		}
 793
 794		for stream.Next() {
 795			event := stream.Current()
 796
 797			switch event.Type {
 798			case "response.created":
 799				_ = event.AsResponseCreated()
 800
 801			case "response.output_item.added":
 802				added := event.AsResponseOutputItemAdded()
 803				switch added.Item.Type {
 804				case "function_call":
 805					ongoingToolCalls[added.OutputIndex] = &ongoingToolCall{
 806						toolName:   added.Item.Name,
 807						toolCallID: added.Item.CallID,
 808					}
 809					if !yield(fantasy.StreamPart{
 810						Type:         fantasy.StreamPartTypeToolInputStart,
 811						ID:           added.Item.CallID,
 812						ToolCallName: added.Item.Name,
 813					}) {
 814						return
 815					}
 816
 817				case "message":
 818					if !yield(fantasy.StreamPart{
 819						Type: fantasy.StreamPartTypeTextStart,
 820						ID:   added.Item.ID,
 821					}) {
 822						return
 823					}
 824
 825				case "reasoning":
 826					metadata := &ResponsesReasoningMetadata{
 827						ItemID:  added.Item.ID,
 828						Summary: []string{},
 829					}
 830					if added.Item.EncryptedContent != "" {
 831						metadata.EncryptedContent = &added.Item.EncryptedContent
 832					}
 833
 834					activeReasoning[added.Item.ID] = &reasoningState{
 835						metadata: metadata,
 836					}
 837					if !yield(fantasy.StreamPart{
 838						Type: fantasy.StreamPartTypeReasoningStart,
 839						ID:   added.Item.ID,
 840						ProviderMetadata: fantasy.ProviderMetadata{
 841							Name: metadata,
 842						},
 843					}) {
 844						return
 845					}
 846				}
 847
 848			case "response.output_item.done":
 849				done := event.AsResponseOutputItemDone()
 850				switch done.Item.Type {
 851				case "function_call":
 852					tc := ongoingToolCalls[done.OutputIndex]
 853					if tc != nil {
 854						delete(ongoingToolCalls, done.OutputIndex)
 855						hasFunctionCall = true
 856
 857						if !yield(fantasy.StreamPart{
 858							Type: fantasy.StreamPartTypeToolInputEnd,
 859							ID:   done.Item.CallID,
 860						}) {
 861							return
 862						}
 863						if !yield(fantasy.StreamPart{
 864							Type:          fantasy.StreamPartTypeToolCall,
 865							ID:            done.Item.CallID,
 866							ToolCallName:  done.Item.Name,
 867							ToolCallInput: done.Item.Arguments,
 868						}) {
 869							return
 870						}
 871					}
 872
 873				case "message":
 874					if !yield(fantasy.StreamPart{
 875						Type: fantasy.StreamPartTypeTextEnd,
 876						ID:   done.Item.ID,
 877					}) {
 878						return
 879					}
 880
 881				case "reasoning":
 882					state := activeReasoning[done.Item.ID]
 883					if state != nil {
 884						if !yield(fantasy.StreamPart{
 885							Type: fantasy.StreamPartTypeReasoningEnd,
 886							ID:   done.Item.ID,
 887							ProviderMetadata: fantasy.ProviderMetadata{
 888								Name: state.metadata,
 889							},
 890						}) {
 891							return
 892						}
 893						delete(activeReasoning, done.Item.ID)
 894					}
 895				}
 896
 897			case "response.function_call_arguments.delta":
 898				delta := event.AsResponseFunctionCallArgumentsDelta()
 899				tc := ongoingToolCalls[delta.OutputIndex]
 900				if tc != nil {
 901					if !yield(fantasy.StreamPart{
 902						Type:  fantasy.StreamPartTypeToolInputDelta,
 903						ID:    tc.toolCallID,
 904						Delta: delta.Delta,
 905					}) {
 906						return
 907					}
 908				}
 909
 910			case "response.output_text.delta":
 911				textDelta := event.AsResponseOutputTextDelta()
 912				if !yield(fantasy.StreamPart{
 913					Type:  fantasy.StreamPartTypeTextDelta,
 914					ID:    textDelta.ItemID,
 915					Delta: textDelta.Delta,
 916				}) {
 917					return
 918				}
 919
 920			case "response.reasoning_summary_part.added":
 921				added := event.AsResponseReasoningSummaryPartAdded()
 922				state := activeReasoning[added.ItemID]
 923				if state != nil {
 924					state.metadata.Summary = append(state.metadata.Summary, "")
 925					activeReasoning[added.ItemID] = state
 926					if !yield(fantasy.StreamPart{
 927						Type:  fantasy.StreamPartTypeReasoningDelta,
 928						ID:    added.ItemID,
 929						Delta: "\n",
 930						ProviderMetadata: fantasy.ProviderMetadata{
 931							Name: state.metadata,
 932						},
 933					}) {
 934						return
 935					}
 936				}
 937
 938			case "response.reasoning_summary_text.delta":
 939				textDelta := event.AsResponseReasoningSummaryTextDelta()
 940				state := activeReasoning[textDelta.ItemID]
 941				if state != nil {
 942					if len(state.metadata.Summary)-1 >= int(textDelta.SummaryIndex) {
 943						state.metadata.Summary[textDelta.SummaryIndex] += textDelta.Delta
 944					}
 945					activeReasoning[textDelta.ItemID] = state
 946					if !yield(fantasy.StreamPart{
 947						Type:  fantasy.StreamPartTypeReasoningDelta,
 948						ID:    textDelta.ItemID,
 949						Delta: textDelta.Delta,
 950						ProviderMetadata: fantasy.ProviderMetadata{
 951							Name: state.metadata,
 952						},
 953					}) {
 954						return
 955					}
 956				}
 957
 958			case "response.completed", "response.incomplete":
 959				completed := event.AsResponseCompleted()
 960				finishReason = mapResponsesFinishReason(completed.Response.IncompleteDetails.Reason, hasFunctionCall)
 961				usage = fantasy.Usage{
 962					InputTokens:  completed.Response.Usage.InputTokens,
 963					OutputTokens: completed.Response.Usage.OutputTokens,
 964					TotalTokens:  completed.Response.Usage.InputTokens + completed.Response.Usage.OutputTokens,
 965				}
 966				if completed.Response.Usage.OutputTokensDetails.ReasoningTokens != 0 {
 967					usage.ReasoningTokens = completed.Response.Usage.OutputTokensDetails.ReasoningTokens
 968				}
 969				if completed.Response.Usage.InputTokensDetails.CachedTokens != 0 {
 970					usage.CacheReadTokens = completed.Response.Usage.InputTokensDetails.CachedTokens
 971				}
 972
 973			case "error":
 974				errorEvent := event.AsError()
 975				if !yield(fantasy.StreamPart{
 976					Type:  fantasy.StreamPartTypeError,
 977					Error: fmt.Errorf("response error: %s (code: %s)", errorEvent.Message, errorEvent.Code),
 978				}) {
 979					return
 980				}
 981				return
 982			}
 983		}
 984
 985		err := stream.Err()
 986		if err != nil {
 987			yield(fantasy.StreamPart{
 988				Type:  fantasy.StreamPartTypeError,
 989				Error: toProviderErr(err),
 990			})
 991			return
 992		}
 993
 994		yield(fantasy.StreamPart{
 995			Type:         fantasy.StreamPartTypeFinish,
 996			Usage:        usage,
 997			FinishReason: finishReason,
 998		})
 999	}, nil
1000}
1001
1002// GetReasoningMetadata extracts reasoning metadata from provider options for responses models.
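// The metadata (item ID, summary texts, optional encrypted content) is what allows
// reasoning items to be replayed back to the API on later turns.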
1003func GetReasoningMetadata(providerOptions fantasy.ProviderOptions) *ResponsesReasoningMetadata {
1004	if openaiResponsesOptions, ok := providerOptions[Name]; ok {
1005		if reasoning, ok := openaiResponsesOptions.(*ResponsesReasoningMetadata); ok {
1006			return reasoning
1007		}
1008	}
1009	return nil
1010}
1011
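// ongoingToolCall tracks a function call whose arguments are still being streamed.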
1012type ongoingToolCall struct {
1013	toolName   string
1014	toolCallID string
1015}
1016
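// reasoningState accumulates summary text and metadata for an in-flight reasoning item.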
1017type reasoningState struct {
1018	metadata *ResponsesReasoningMetadata
1019}
1020
1021// GenerateObject implements fantasy.LanguageModel.
1022func (o responsesLanguageModel) GenerateObject(ctx context.Context, call fantasy.ObjectCall) (*fantasy.ObjectResponse, error) {
1023	switch o.objectMode {
1024	case fantasy.ObjectModeText:
1025		return object.GenerateWithText(ctx, o, call)
1026	case fantasy.ObjectModeTool:
1027		return object.GenerateWithTool(ctx, o, call)
1028	default:
1029		return o.generateObjectWithJSONMode(ctx, call)
1030	}
1031}
1032
1033// StreamObject implements fantasy.LanguageModel.
1034func (o responsesLanguageModel) StreamObject(ctx context.Context, call fantasy.ObjectCall) (fantasy.ObjectStreamResponse, error) {
1035	switch o.objectMode {
1036	case fantasy.ObjectModeTool:
1037		return object.StreamWithTool(ctx, o, call)
1038	case fantasy.ObjectModeText:
1039		return object.StreamWithText(ctx, o, call)
1040	default:
1041		return o.streamObjectWithJSONMode(ctx, call)
1042	}
1043}
1044
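// generateObjectWithJSONMode requests structured output by attaching a JSON schema to
// the response text format, then parses and validates the returned text.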
1045func (o responsesLanguageModel) generateObjectWithJSONMode(ctx context.Context, call fantasy.ObjectCall) (*fantasy.ObjectResponse, error) {
1046	// Convert our Schema to OpenAI's JSON Schema format
1047	jsonSchemaMap := schema.ToMap(call.Schema)
1048
1049	// Add additionalProperties: false recursively for strict mode (OpenAI requirement)
1050	addAdditionalPropertiesFalse(jsonSchemaMap)
1051
1052	schemaName := call.SchemaName
1053	if schemaName == "" {
1054		schemaName = "response"
1055	}
1056
1057	// Build request using prepareParams
1058	fantasyCall := fantasy.Call{
1059		Prompt:           call.Prompt,
1060		MaxOutputTokens:  call.MaxOutputTokens,
1061		Temperature:      call.Temperature,
1062		TopP:             call.TopP,
1063		PresencePenalty:  call.PresencePenalty,
1064		FrequencyPenalty: call.FrequencyPenalty,
1065		ProviderOptions:  call.ProviderOptions,
1066	}
1067
1068	params, warnings := o.prepareParams(fantasyCall)
1069
1070	// Add structured output via Text.Format field
1071	params.Text = responses.ResponseTextConfigParam{
1072		Format: responses.ResponseFormatTextConfigParamOfJSONSchema(schemaName, jsonSchemaMap),
1073	}
1074
1075	// Make request
1076	response, err := o.client.Responses.New(ctx, *params)
1077	if err != nil {
1078		return nil, toProviderErr(err)
1079	}
1080
1081	if response.Error.Message != "" {
1082		return nil, &fantasy.Error{
1083			Title:   "provider error",
1084			Message: fmt.Sprintf("%s (code: %s)", response.Error.Message, response.Error.Code),
1085		}
1086	}
1087
1088	// Extract JSON text from response
1089	var jsonText string
1090	for _, outputItem := range response.Output {
1091		if outputItem.Type == "message" {
1092			for _, contentPart := range outputItem.Content {
1093				if contentPart.Type == "output_text" {
1094					jsonText = contentPart.Text
1095					break
1096				}
1097			}
1098		}
1099	}
1100
1101	if jsonText == "" {
1102		usage := fantasy.Usage{
1103			InputTokens:  response.Usage.InputTokens,
1104			OutputTokens: response.Usage.OutputTokens,
1105			TotalTokens:  response.Usage.InputTokens + response.Usage.OutputTokens,
1106		}
1107		finishReason := mapResponsesFinishReason(response.IncompleteDetails.Reason, false)
1108		return nil, &fantasy.NoObjectGeneratedError{
1109			RawText:      "",
1110			ParseError:   fmt.Errorf("no text content in response"),
1111			Usage:        usage,
1112			FinishReason: finishReason,
1113		}
1114	}
1115
1116	// Parse and validate
1117	var obj any
1118	if call.RepairText != nil {
1119		obj, err = schema.ParseAndValidateWithRepair(ctx, jsonText, call.Schema, call.RepairText)
1120	} else {
1121		obj, err = schema.ParseAndValidate(jsonText, call.Schema)
1122	}
1123
1124	usage := fantasy.Usage{
1125		InputTokens:  response.Usage.InputTokens,
1126		OutputTokens: response.Usage.OutputTokens,
1127		TotalTokens:  response.Usage.InputTokens + response.Usage.OutputTokens,
1128	}
1129	if response.Usage.OutputTokensDetails.ReasoningTokens != 0 {
1130		usage.ReasoningTokens = response.Usage.OutputTokensDetails.ReasoningTokens
1131	}
1132	if response.Usage.InputTokensDetails.CachedTokens != 0 {
1133		usage.CacheReadTokens = response.Usage.InputTokensDetails.CachedTokens
1134	}
1135
1136	finishReason := mapResponsesFinishReason(response.IncompleteDetails.Reason, false)
1137
1138	if err != nil {
1139		// Add usage info to error
1140		if nogErr, ok := err.(*fantasy.NoObjectGeneratedError); ok {
1141			nogErr.Usage = usage
1142			nogErr.FinishReason = finishReason
1143		}
1144		return nil, err
1145	}
1146
1147	return &fantasy.ObjectResponse{
1148		Object:       obj,
1149		RawText:      jsonText,
1150		Usage:        usage,
1151		FinishReason: finishReason,
1152		Warnings:     warnings,
1153	}, nil
1154}
1155
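// streamObjectWithJSONMode streams structured output, emitting a new object part
// whenever the accumulated JSON parses into a value that validates against the schema.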
1156func (o responsesLanguageModel) streamObjectWithJSONMode(ctx context.Context, call fantasy.ObjectCall) (fantasy.ObjectStreamResponse, error) {
1157	// Convert our Schema to OpenAI's JSON Schema format
1158	jsonSchemaMap := schema.ToMap(call.Schema)
1159
1160	// Add additionalProperties: false recursively for strict mode (OpenAI requirement)
1161	addAdditionalPropertiesFalse(jsonSchemaMap)
1162
1163	schemaName := call.SchemaName
1164	if schemaName == "" {
1165		schemaName = "response"
1166	}
1167
1168	// Build request using prepareParams
1169	fantasyCall := fantasy.Call{
1170		Prompt:           call.Prompt,
1171		MaxOutputTokens:  call.MaxOutputTokens,
1172		Temperature:      call.Temperature,
1173		TopP:             call.TopP,
1174		PresencePenalty:  call.PresencePenalty,
1175		FrequencyPenalty: call.FrequencyPenalty,
1176		ProviderOptions:  call.ProviderOptions,
1177	}
1178
1179	params, warnings := o.prepareParams(fantasyCall)
1180
1181	// Add structured output via Text.Format field
1182	params.Text = responses.ResponseTextConfigParam{
1183		Format: responses.ResponseFormatTextConfigParamOfJSONSchema(schemaName, jsonSchemaMap),
1184	}
1185
1186	stream := o.client.Responses.NewStreaming(ctx, *params)
1187
1188	return func(yield func(fantasy.ObjectStreamPart) bool) {
1189		if len(warnings) > 0 {
1190			if !yield(fantasy.ObjectStreamPart{
1191				Type:     fantasy.ObjectStreamPartTypeObject,
1192				Warnings: warnings,
1193			}) {
1194				return
1195			}
1196		}
1197
1198		var accumulated string
1199		var lastParsedObject any
1200		var usage fantasy.Usage
1201		var finishReason fantasy.FinishReason
1202		var streamErr error
1203		hasFunctionCall := false
1204
1205		for stream.Next() {
1206			event := stream.Current()
1207
1208			switch event.Type {
1209			case "response.output_text.delta":
1210				textDelta := event.AsResponseOutputTextDelta()
1211				accumulated += textDelta.Delta
1212
1213				// Try to parse the accumulated text
1214				obj, state, parseErr := schema.ParsePartialJSON(accumulated)
1215
1216				// If we successfully parsed, validate and emit
1217				if state == schema.ParseStateSuccessful || state == schema.ParseStateRepaired {
1218					if err := schema.ValidateAgainstSchema(obj, call.Schema); err == nil {
1219						// Only emit if object is different from last
1220						if !reflect.DeepEqual(obj, lastParsedObject) {
1221							if !yield(fantasy.ObjectStreamPart{
1222								Type:   fantasy.ObjectStreamPartTypeObject,
1223								Object: obj,
1224							}) {
1225								return
1226							}
1227							lastParsedObject = obj
1228						}
1229					}
1230				}
1231
1232				// If parsing failed and we have a repair function, try it
1233				if state == schema.ParseStateFailed && call.RepairText != nil {
1234					repairedText, repairErr := call.RepairText(ctx, accumulated, parseErr)
1235					if repairErr == nil {
1236						obj2, state2, _ := schema.ParsePartialJSON(repairedText)
1237						if (state2 == schema.ParseStateSuccessful || state2 == schema.ParseStateRepaired) &&
1238							schema.ValidateAgainstSchema(obj2, call.Schema) == nil {
1239							if !reflect.DeepEqual(obj2, lastParsedObject) {
1240								if !yield(fantasy.ObjectStreamPart{
1241									Type:   fantasy.ObjectStreamPartTypeObject,
1242									Object: obj2,
1243								}) {
1244									return
1245								}
1246								lastParsedObject = obj2
1247							}
1248						}
1249					}
1250				}
1251
1252			case "response.completed", "response.incomplete":
1253				completed := event.AsResponseCompleted()
1254				finishReason = mapResponsesFinishReason(completed.Response.IncompleteDetails.Reason, hasFunctionCall)
1255				usage = fantasy.Usage{
1256					InputTokens:  completed.Response.Usage.InputTokens,
1257					OutputTokens: completed.Response.Usage.OutputTokens,
1258					TotalTokens:  completed.Response.Usage.InputTokens + completed.Response.Usage.OutputTokens,
1259				}
1260				if completed.Response.Usage.OutputTokensDetails.ReasoningTokens != 0 {
1261					usage.ReasoningTokens = completed.Response.Usage.OutputTokensDetails.ReasoningTokens
1262				}
1263				if completed.Response.Usage.InputTokensDetails.CachedTokens != 0 {
1264					usage.CacheReadTokens = completed.Response.Usage.InputTokensDetails.CachedTokens
1265				}
1266
1267			case "error":
1268				errorEvent := event.AsError()
1269				streamErr = fmt.Errorf("response error: %s (code: %s)", errorEvent.Message, errorEvent.Code)
1270				if !yield(fantasy.ObjectStreamPart{
1271					Type:  fantasy.ObjectStreamPartTypeError,
1272					Error: streamErr,
1273				}) {
1274					return
1275				}
1276				return
1277			}
1278		}
1279
1280		err := stream.Err()
1281		if err != nil {
1282			yield(fantasy.ObjectStreamPart{
1283				Type:  fantasy.ObjectStreamPartTypeError,
1284				Error: toProviderErr(err),
1285			})
1286			return
1287		}
1288
1289		// Final validation and emit
1290		// Emit the finish part only if at least one valid object was produced
1291			yield(fantasy.ObjectStreamPart{
1292				Type:         fantasy.ObjectStreamPartTypeFinish,
1293				Usage:        usage,
1294				FinishReason: finishReason,
1295			})
1296		} else if streamErr == nil && lastParsedObject == nil {
1297			// No object was generated
1298			yield(fantasy.ObjectStreamPart{
1299				Type: fantasy.ObjectStreamPartTypeError,
1300				Error: &fantasy.NoObjectGeneratedError{
1301					RawText:      accumulated,
1302					ParseError:   fmt.Errorf("no valid object generated in stream"),
1303					Usage:        usage,
1304					FinishReason: finishReason,
1305				},
1306			})
1307		}
1308	}, nil
1309}