diff --git a/providers/openai/language_model.go b/providers/openai/language_model.go
index 96b4514b5902f1b30ea179b6abbd6261ebc86f3c..4515dabbd9c89e1ab19279b470acd71d5812209d 100644
--- a/providers/openai/language_model.go
+++ b/providers/openai/language_model.go
@@ -1,6 +1,7 @@
 package openai
 
 import (
+	"cmp"
 	"context"
 	"encoding/json"
 	"errors"
@@ -410,20 +411,18 @@ func (o languageModel) Stream(ctx context.Context, call fantasy.Call) (fantasy.S
 					toolCalls[toolCallDelta.Index] = existingToolCall
 				}
 			} else {
-				var err error
-				if toolCallDelta.Type != "function" {
-					err = &fantasy.Error{Title: "invalid provider response", Message: "expected 'function' type."}
-				}
-				if toolCallDelta.ID == "" {
-					err = &fantasy.Error{Title: "invalid provider response", Message: "expected 'id' to be a string."}
-				}
-				if toolCallDelta.Function.Name == "" {
-					err = &fantasy.Error{Title: "invalid provider response", Message: "expected 'function.name' to be a string."}
+				// Some providers, like Ollama, may send empty tool calls or omit some fields.
+				// Skip deltas that carry no usable info; otherwise fill in sane defaults.
+				if toolCallDelta.Function.Name == "" && toolCallDelta.Function.Arguments == "" {
+					continue
 				}
-				if err != nil {
+				toolCallDelta.Type = cmp.Or(toolCallDelta.Type, "function")
+				toolCallDelta.ID = cmp.Or(toolCallDelta.ID, fmt.Sprintf("tool-call-%d", toolCallDelta.Index))
+
+				if toolCallDelta.Type != "function" {
 					yield(fantasy.StreamPart{
 						Type:  fantasy.StreamPartTypeError,
-						Error: toProviderErr(stream.Err()),
+						Error: &fantasy.Error{Title: "invalid provider response", Message: "expected 'function' type."},
 					})
 					return
 				}
@@ -453,7 +452,7 @@ func (o languageModel) Stream(ctx context.Context, call fantasy.Call) (fantasy.S
 				if xjson.IsValid(toolCalls[toolCallDelta.Index].arguments) {
 					if !yield(fantasy.StreamPart{
 						Type: fantasy.StreamPartTypeToolInputEnd,
-						ID:   toolCallDelta.ID,
+						ID:   exTc.id,
 					}) {
 						return
 					}