From 5bdccdfbbd2eec17e54ef46cfb4ccb17330dc36b Mon Sep 17 00:00:00 2001
From: Andrey Nering
Date: Tue, 26 Aug 2025 15:44:12 -0300
Subject: [PATCH] fix: fix panic with gemini via litellm

Fixes #901
---
 internal/llm/provider/openai.go | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index a7c46f47dd30b83637119a3a50e0d655f3b3b355..e28b0444df023245e235f4a9cffa47adb9a46286 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -345,8 +345,11 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 	var msgToolCalls []openai.ChatCompletionMessageToolCall
 	for openaiStream.Next() {
 		chunk := openaiStream.Current()
+		if len(chunk.Choices) == 0 {
+			continue
+		}
 		// Kujtim: this is an issue with openrouter qwen, its sending -1 for the tool index
-		if len(chunk.Choices) > 0 && len(chunk.Choices[0].Delta.ToolCalls) > 0 && chunk.Choices[0].Delta.ToolCalls[0].Index == -1 {
+		if len(chunk.Choices[0].Delta.ToolCalls) > 0 && chunk.Choices[0].Delta.ToolCalls[0].Index == -1 {
 			chunk.Choices[0].Delta.ToolCalls[0].Index = 0
 		}
 		acc.AddChunk(chunk)