From 5ae1e751c58e01c0bdd8eb54bc7145f973d30e20 Mon Sep 17 00:00:00 2001
From: Fabio Mora <17892293+Fuabioo@users.noreply.github.com>
Date: Tue, 15 Jul 2025 21:10:56 -0600
Subject: [PATCH] Fix lint issue on OpenAI field access in streaming response handler

---
 internal/llm/provider/openai.go | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index ed5d95ed909124219ef9b5f093153de9ab55d6c1..898759ba7891071a5380752b2d44909341e1de72 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -342,7 +342,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 		slog.Debug("Response", "messages", string(jsonData))
 	}
 
-	if len(acc.ChatCompletion.Choices) == 0 {
+	if len(acc.Choices) == 0 {
 		eventChan <- ProviderEvent{
 			Type:  EventError,
 			Error: fmt.Errorf("received empty streaming response from OpenAI API - check endpoint configuration"),
@@ -350,7 +350,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 		return
 	}
 
-	resultFinishReason := acc.ChatCompletion.Choices[0].FinishReason
+	resultFinishReason := acc.Choices[0].FinishReason
 	if resultFinishReason == "" {
 		// If the finish reason is empty, we assume it was a successful completion
 		// INFO: this is happening for openrouter for some reason
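
Note (not part of the patch): the lint fix relies on Go's embedded-field promotion, assuming the SDK's ChatCompletionAccumulator embeds ChatCompletion, in which case acc.Choices and acc.ChatCompletion.Choices name the same field and the explicit selector is flagged as redundant. A minimal self-contained sketch of that promotion, using hypothetical stand-in types rather than the real SDK types:

	package main

	import "fmt"

	// choice and chatCompletion are hypothetical stand-ins mirroring the
	// shape of a chat completion response.
	type choice struct {
		FinishReason string
	}

	type chatCompletion struct {
		Choices []choice
	}

	// accumulator stands in for the streaming accumulator: it embeds
	// chatCompletion, so Choices is promoted onto the accumulator itself.
	type accumulator struct {
		chatCompletion
	}

	func main() {
		acc := accumulator{chatCompletion{Choices: []choice{{FinishReason: "stop"}}}}

		// Both selectors reach the same field; the shorter promoted form is
		// what the patch switches to.
		fmt.Println(acc.chatCompletion.Choices[0].FinishReason) // "stop"
		fmt.Println(acc.Choices[0].FinishReason)                // "stop"
	}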