fix(llm): set request timeout (#736)

Carlos Alexandro Becker created

* fix(llm): set request timeout

If the server is down, the address doesn't resolve, or the request hangs for
any of a myriad of other reasons, the call currently gets stuck indefinitely.

This makes the request time out after 1 minute.
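
Both provider SDKs expose option.WithRequestTimeout, so the timeout is attached
per request rather than on the shared HTTP client. A minimal standalone sketch of
the same idea against the openai-go client (assuming the v1 API shape; import
paths and field names differ across SDK releases, and the model and prompt here
are placeholders):

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/openai/openai-go"
	"github.com/openai/openai-go/option"
)

func main() {
	// Reads OPENAI_API_KEY from the environment.
	client := openai.NewClient()

	// Without a per-request timeout, a dead server or an unresolvable
	// address can leave this call blocked indefinitely; WithRequestTimeout
	// bounds the whole request at one minute.
	resp, err := client.Chat.Completions.New(
		context.Background(),
		openai.ChatCompletionNewParams{
			Model: openai.ChatModelGPT4o, // placeholder model
			Messages: []openai.ChatCompletionMessageParamUnion{
				openai.UserMessage("ping"),
			},
		},
		option.WithRequestTimeout(time.Minute),
	)
	if err != nil {
		fmt.Println("request failed (possibly timed out):", err)
		return
	}
	fmt.Println(resp.Choices[0].Message.Content)
}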

Signed-off-by: Carlos Alexandro Becker <caarlos0@users.noreply.github.com>

* fix: remove log

Signed-off-by: Carlos Alexandro Becker <caarlos0@users.noreply.github.com>

---------

Signed-off-by: Carlos Alexandro Becker <caarlos0@users.noreply.github.com>

Change summary

internal/llm/provider/anthropic.go | 3 +--
internal/llm/provider/openai.go    | 2 +-
2 files changed, 2 insertions(+), 3 deletions(-)

Detailed changes

internal/llm/provider/anthropic.go

@@ -153,7 +153,6 @@ func (a *anthropicClient) convertMessages(messages []message.Message) (anthropic
 			}
 
 			if len(blocks) == 0 {
-				slog.Warn("There is a message without content, investigate, this should not happen")
 				continue
 			}
 			anthropicMessages = append(anthropicMessages, anthropic.NewAssistantMessage(blocks...))
@@ -333,7 +332,7 @@ func (a *anthropicClient) stream(ctx context.Context, messages []message.Message
 			// Prepare messages on each attempt in case max_tokens was adjusted
 			preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
 
-			var opts []option.RequestOption
+			opts := []option.RequestOption{option.WithRequestTimeout(time.Minute)}
 			if a.isThinkingEnabled() {
 				opts = append(opts, option.WithHeaderAdd("anthropic-beta", "interleaved-thinking-2025-05-14"))
 			}

internal/llm/provider/openai.go

@@ -166,7 +166,6 @@ func (o *openaiClient) convertMessages(messages []message.Message) (openaiMessag
 				}
 			}
 			if !hasContent {
-				slog.Warn("There is a message without content, investigate, this should not happen")
 				continue
 			}
 
@@ -338,6 +337,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 			openaiStream := o.client.Chat.Completions.NewStreaming(
 				ctx,
 				params,
+				option.WithRequestTimeout(time.Minute),
 			)
 
 			acc := openai.ChatCompletionAccumulator{}
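
When the one-minute limit is hit, the SDK cancels the request, so an error
surfaces instead of the call hanging forever. A sketch of how a caller could
recognize that case (the helper and its package are hypothetical, and the exact
error wrapping depends on the SDK version and HTTP transport):

package provider

import (
	"context"
	"errors"
	"net"
)

// isRequestTimeout is a hypothetical helper: it reports whether an error
// returned by a provider client looks like the request timeout firing
// rather than some other failure.
func isRequestTimeout(err error) bool {
	if errors.Is(err, context.DeadlineExceeded) {
		return true
	}
	var netErr net.Error
	return errors.As(err, &netErr) && netErr.Timeout()
}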