From 0f403feafcd57045802ce0e24b4eff01d409cef5 Mon Sep 17 00:00:00 2001
From: Carlos Alexandro Becker
Date: Thu, 14 Aug 2025 10:58:38 -0300
Subject: [PATCH] fix(llm): set request timeout (#736)

* fix(llm): set request timeout

If the server is down, the address doesn't resolve, or the request fails
for any number of other reasons, it currently gets stuck. This makes it
time out after 1 minute.

Signed-off-by: Carlos Alexandro Becker

* fix: remove log

Signed-off-by: Carlos Alexandro Becker

---------

Signed-off-by: Carlos Alexandro Becker
---
 internal/llm/provider/anthropic.go | 3 +--
 internal/llm/provider/openai.go    | 2 +-
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/internal/llm/provider/anthropic.go b/internal/llm/provider/anthropic.go
index 6b655f7f7741a658ea3e7108e1fba0032c95e563..e626f37d998c72cdcc3602236f906cbb41ac1cc0 100644
--- a/internal/llm/provider/anthropic.go
+++ b/internal/llm/provider/anthropic.go
@@ -153,7 +153,6 @@ func (a *anthropicClient) convertMessages(messages []message.Message) (anthropic
 			}
 
 			if len(blocks) == 0 {
-				slog.Warn("There is a message without content, investigate, this should not happen")
 				continue
 			}
 			anthropicMessages = append(anthropicMessages, anthropic.NewAssistantMessage(blocks...))
@@ -333,7 +332,7 @@ func (a *anthropicClient) stream(ctx context.Context, messages []message.Message
 			// Prepare messages on each attempt in case max_tokens was adjusted
 			preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
 
-			var opts []option.RequestOption
+			opts := []option.RequestOption{option.WithRequestTimeout(time.Minute)}
 			if a.isThinkingEnabled() {
 				opts = append(opts, option.WithHeaderAdd("anthropic-beta", "interleaved-thinking-2025-05-14"))
 			}
diff --git a/internal/llm/provider/openai.go b/internal/llm/provider/openai.go
index 8433282fc0b5e771a0a28184895a864b30b0e389..38497a73a06297976db7361dcf122ec80eb6bca8 100644
--- a/internal/llm/provider/openai.go
+++ b/internal/llm/provider/openai.go
@@ -166,7 +166,6 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 			}
 		}
 		if !hasContent {
-			slog.Warn("There is a message without content, investigate, this should not happen")
 			continue
 		}
 
@@ -338,6 +337,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 	openaiStream := o.client.Chat.Completions.NewStreaming(
 		ctx,
 		params,
+		option.WithRequestTimeout(time.Minute),
 	)
 
 	acc := openai.ChatCompletionAccumulator{}
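
For context, a minimal standalone sketch of the underlying idea the patch applies: bounding a request with a deadline so that a down or unroutable host produces an error after a minute instead of hanging forever. This is not code from the patched providers (their SDKs expose the same idea via option.WithRequestTimeout, as the diff shows); it uses only the Go standard library, and the endpoint URL is hypothetical.

package main

import (
	"context"
	"fmt"
	"net/http"
	"time"
)

func main() {
	// Attach a one-minute deadline to the request's context, mirroring the
	// per-request timeout the patch sets via option.WithRequestTimeout.
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	// Hypothetical endpoint standing in for the provider API.
	req, err := http.NewRequestWithContext(ctx, http.MethodGet,
		"https://unreachable.example.com/v1/messages", nil)
	if err != nil {
		panic(err)
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		// If the host is down or unroutable, Do returns once the deadline
		// expires (context.DeadlineExceeded) instead of blocking indefinitely.
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}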