diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index ed5d95ed909124219ef9b5f093153de9ab55d6c1..898759ba7891071a5380752b2d44909341e1de72 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -342,7 +342,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 		slog.Debug("Response", "messages", string(jsonData))
 	}
 
-	if len(acc.ChatCompletion.Choices) == 0 {
+	if len(acc.Choices) == 0 {
 		eventChan <- ProviderEvent{
 			Type:  EventError,
 			Error: fmt.Errorf("received empty streaming response from OpenAI API - check endpoint configuration"),
@@ -350,7 +350,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 		return
 	}
 
-	resultFinishReason := acc.ChatCompletion.Choices[0].FinishReason
+	resultFinishReason := acc.Choices[0].FinishReason
 	if resultFinishReason == "" {
 		// If the finish reason is empty, we assume it was a successful completion
 		// INFO: this is happening for openrouter for some reason
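For context on why the shorter spelling works: assuming the streaming accumulator embeds the completion struct (as the openai-go `ChatCompletionAccumulator` does with `ChatCompletion`), Go field promotion makes `acc.Choices` and `acc.ChatCompletion.Choices` refer to the same field, and the diff simply standardizes on the promoted form. Below is a minimal, self-contained sketch of that promotion using hypothetical stand-in types (`Completion`, `Accumulator`), not the SDK's actual definitions:

```go
// Sketch of Go embedded-field promotion; Completion and Accumulator are
// hypothetical stand-ins mirroring the shape assumed above, not openai-go types.
package main

import "fmt"

type Choice struct {
	FinishReason string
}

type Completion struct {
	Choices []Choice
}

type Accumulator struct {
	Completion // embedded: Choices is promoted onto Accumulator
}

func main() {
	acc := Accumulator{Completion{Choices: []Choice{{FinishReason: "stop"}}}}

	// Both expressions resolve to the same underlying slice.
	fmt.Println(acc.Choices[0].FinishReason)            // promoted access, as in the new code
	fmt.Println(acc.Completion.Choices[0].FinishReason) // explicit embedded access, as in the old code
}
```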