package provider

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strings"
	"time"

	"github.com/anthropics/anthropic-sdk-go"
	"github.com/anthropics/anthropic-sdk-go/bedrock"
	"github.com/anthropics/anthropic-sdk-go/option"
	"github.com/kujtimiihoxha/opencode/internal/config"
	"github.com/kujtimiihoxha/opencode/internal/llm/tools"
	"github.com/kujtimiihoxha/opencode/internal/logging"
	"github.com/kujtimiihoxha/opencode/internal/message"
)

type anthropicOptions struct {
	useBedrock   bool
	disableCache bool
	shouldThink  func(userMessage string) bool
}

type AnthropicOption func(*anthropicOptions)

type anthropicClient struct {
	providerOptions providerClientOptions
	options         anthropicOptions
	client          anthropic.Client
}

type AnthropicClient ProviderClient

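// newAnthropicClient builds an Anthropic-backed provider client, applying any
// AnthropicOption overrides and configuring the SDK for Bedrock when requested.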
func newAnthropicClient(opts providerClientOptions) AnthropicClient {
	anthropicOpts := anthropicOptions{}
	for _, o := range opts.anthropicOptions {
		o(&anthropicOpts)
	}

	anthropicClientOptions := []option.RequestOption{}
	if opts.apiKey != "" {
		anthropicClientOptions = append(anthropicClientOptions, option.WithAPIKey(opts.apiKey))
	}
	if anthropicOpts.useBedrock {
		anthropicClientOptions = append(anthropicClientOptions, bedrock.WithLoadDefaultConfig(context.Background()))
	}

	client := anthropic.NewClient(anthropicClientOptions...)
	return &anthropicClient{
		providerOptions: opts,
		options:         anthropicOpts,
		client:          client,
	}
}

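// convertMessages maps internal chat messages to Anthropic message params,
// marking up to two user/assistant text blocks with ephemeral cache control
// unless caching is disabled.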
func (a *anthropicClient) convertMessages(messages []message.Message) (anthropicMessages []anthropic.MessageParam) {
	cachedBlocks := 0
	for _, msg := range messages {
		switch msg.Role {
		case message.User:
			content := anthropic.NewTextBlock(msg.Content().String())
			if cachedBlocks < 2 && !a.options.disableCache {
				content.OfRequestTextBlock.CacheControl = anthropic.CacheControlEphemeralParam{
					Type: "ephemeral",
				}
				cachedBlocks++
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(content))

		case message.Assistant:
			blocks := []anthropic.ContentBlockParamUnion{}
			if msg.Content().String() != "" {
				content := anthropic.NewTextBlock(msg.Content().String())
				if cachedBlocks < 2 && !a.options.disableCache {
					content.OfRequestTextBlock.CacheControl = anthropic.CacheControlEphemeralParam{
						Type: "ephemeral",
					}
					cachedBlocks++
				}
				blocks = append(blocks, content)
			}

			for _, toolCall := range msg.ToolCalls() {
				var inputMap map[string]any
				err := json.Unmarshal([]byte(toolCall.Input), &inputMap)
				if err != nil {
					// Skip tool calls whose input is not valid JSON.
					continue
				}
				blocks = append(blocks, anthropic.ContentBlockParamOfRequestToolUseBlock(toolCall.ID, inputMap, toolCall.Name))
			}

			if len(blocks) == 0 {
				logging.Warn("assistant message has no content or tool calls; skipping (this should not happen)")
				continue
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewAssistantMessage(blocks...))

		case message.Tool:
			results := make([]anthropic.ContentBlockParamUnion, len(msg.ToolResults()))
			for i, toolResult := range msg.ToolResults() {
				results[i] = anthropic.NewToolResultBlock(toolResult.ToolCallID, toolResult.Content, toolResult.IsError)
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(results...))
		}
	}
	return
}

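// convertTools maps tool definitions to Anthropic tool params, attaching
// ephemeral cache control to the last tool unless caching is disabled.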
func (a *anthropicClient) convertTools(tools []tools.BaseTool) []anthropic.ToolUnionParam {
	anthropicTools := make([]anthropic.ToolUnionParam, len(tools))

	for i, tool := range tools {
		info := tool.Info()
		toolParam := anthropic.ToolParam{
			Name:        info.Name,
			Description: anthropic.String(info.Description),
			InputSchema: anthropic.ToolInputSchemaParam{
				Properties: info.Parameters,
				// TODO: figure out how to tell Claude which fields are required.
			},
		}

		if i == len(tools)-1 && !a.options.disableCache {
			toolParam.CacheControl = anthropic.CacheControlEphemeralParam{
				Type: "ephemeral",
			}
		}

		anthropicTools[i] = anthropic.ToolUnionParam{OfTool: &toolParam}
	}

	return anthropicTools
}

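// finishReason converts an Anthropic stop reason into the provider-agnostic
// message.FinishReason.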
func (a *anthropicClient) finishReason(reason string) message.FinishReason {
	switch reason {
	case "end_turn":
		return message.FinishReasonEndTurn
	case "max_tokens":
		return message.FinishReasonMaxTokens
	case "tool_use":
		return message.FinishReasonToolUse
	case "stop_sequence":
		return message.FinishReasonEndTurn
	default:
		return message.FinishReasonUnknown
	}
}

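// preparedMessages assembles the request parameters: model, token limits,
// the system prompt (always marked for caching), and, when the latest user
// message asks for it, extended thinking with 80% of the max token budget
// and temperature 1.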
func (a *anthropicClient) preparedMessages(messages []anthropic.MessageParam, tools []anthropic.ToolUnionParam) anthropic.MessageNewParams {
	var thinkingParam anthropic.ThinkingConfigParamUnion
	lastMessage := messages[len(messages)-1]
	isUser := lastMessage.Role == anthropic.MessageParamRoleUser
	messageContent := ""
	temperature := anthropic.Float(0)
	if isUser {
		for _, m := range lastMessage.Content {
			if m.OfRequestTextBlock != nil && m.OfRequestTextBlock.Text != "" {
				messageContent = m.OfRequestTextBlock.Text
			}
		}
		if messageContent != "" && a.options.shouldThink != nil && a.options.shouldThink(messageContent) {
			thinkingParam = anthropic.ThinkingConfigParamUnion{
				OfThinkingConfigEnabled: &anthropic.ThinkingConfigEnabledParam{
					BudgetTokens: int64(float64(a.providerOptions.maxTokens) * 0.8),
					Type:         "enabled",
				},
			}
			temperature = anthropic.Float(1)
		}
	}

	return anthropic.MessageNewParams{
		Model:       anthropic.Model(a.providerOptions.model.APIModel),
		MaxTokens:   a.providerOptions.maxTokens,
		Temperature: temperature,
		Messages:    messages,
		Tools:       tools,
		Thinking:    thinkingParam,
		System: []anthropic.TextBlockParam{
			{
				Text: a.providerOptions.systemMessage,
				CacheControl: anthropic.CacheControlEphemeralParam{
					Type: "ephemeral",
				},
			},
		},
	}
}

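// send performs a single non-streaming completion request, retrying on
// rate-limit and overload errors, and returns the text content, tool calls,
// and token usage of the response.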
func (a *anthropicClient) send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (response *ProviderResponse, err error) {
	preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
	cfg := config.Get()
	if cfg.Debug {
		jsonData, _ := json.Marshal(preparedMessages)
		logging.Debug("Prepared messages", "messages", string(jsonData))
	}
	attempts := 0
	for {
		attempts++
		anthropicResponse, err := a.client.Messages.New(
			ctx,
			preparedMessages,
		)
		// If there is an error, check whether the call can be retried.
		if err != nil {
			retry, after, retryErr := a.shouldRetry(attempts, err)
			if retryErr != nil {
				return nil, retryErr
			}
			if retry {
				logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
				select {
				case <-ctx.Done():
					return nil, ctx.Err()
				case <-time.After(time.Duration(after) * time.Millisecond):
					continue
				}
			}
			return nil, err
		}

		content := ""
		for _, block := range anthropicResponse.Content {
			if text, ok := block.AsAny().(anthropic.TextBlock); ok {
				content += text.Text
			}
		}

		return &ProviderResponse{
			Content:   content,
			ToolCalls: a.toolCalls(*anthropicResponse),
			Usage:     a.usage(*anthropicResponse),
		}, nil
	}
}

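// stream performs a streaming completion request and forwards SDK events on
// the returned channel as ProviderEvents (content, thinking, and tool-use
// deltas, followed by a completion event). Rate-limit errors are retried.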
func (a *anthropicClient) stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
	preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
	cfg := config.Get()
	if cfg.Debug {
		jsonData, _ := json.Marshal(preparedMessages)
		logging.Debug("Prepared messages", "messages", string(jsonData))
	}
	attempts := 0
	eventChan := make(chan ProviderEvent)
	go func() {
		for {
			attempts++
			anthropicStream := a.client.Messages.NewStreaming(
				ctx,
				preparedMessages,
			)
			accumulatedMessage := anthropic.Message{}

			currentToolCallID := ""
			for anthropicStream.Next() {
				event := anthropicStream.Current()
				err := accumulatedMessage.Accumulate(event)
				if err != nil {
					eventChan <- ProviderEvent{Type: EventError, Error: err}
					continue
				}

				switch event := event.AsAny().(type) {
				case anthropic.ContentBlockStartEvent:
					if event.ContentBlock.Type == "text" {
						eventChan <- ProviderEvent{Type: EventContentStart}
					} else if event.ContentBlock.Type == "tool_use" {
						currentToolCallID = event.ContentBlock.ID
						eventChan <- ProviderEvent{
							Type: EventToolUseStart,
							ToolCall: &message.ToolCall{
								ID:       event.ContentBlock.ID,
								Name:     event.ContentBlock.Name,
								Finished: false,
							},
						}
					}

				case anthropic.ContentBlockDeltaEvent:
					if event.Delta.Type == "thinking_delta" && event.Delta.Thinking != "" {
						eventChan <- ProviderEvent{
							Type:     EventThinkingDelta,
							Thinking: event.Delta.Thinking,
						}
					} else if event.Delta.Type == "text_delta" && event.Delta.Text != "" {
						eventChan <- ProviderEvent{
							Type:    EventContentDelta,
							Content: event.Delta.Text,
						}
					} else if event.Delta.Type == "input_json_delta" {
						if currentToolCallID != "" {
							eventChan <- ProviderEvent{
								Type: EventToolUseDelta,
								ToolCall: &message.ToolCall{
									ID:       currentToolCallID,
									Finished: false,
									Input:    event.Delta.JSON.PartialJSON.Raw(),
								},
							}
						}
					}
				case anthropic.ContentBlockStopEvent:
					if currentToolCallID != "" {
						eventChan <- ProviderEvent{
							Type: EventToolUseStop,
							ToolCall: &message.ToolCall{
								ID: currentToolCallID,
							},
						}
						currentToolCallID = ""
					} else {
						eventChan <- ProviderEvent{Type: EventContentStop}
					}

				case anthropic.MessageStopEvent:
					content := ""
					for _, block := range accumulatedMessage.Content {
						if text, ok := block.AsAny().(anthropic.TextBlock); ok {
							content += text.Text
						}
					}

					eventChan <- ProviderEvent{
						Type: EventComplete,
						Response: &ProviderResponse{
							Content:      content,
							ToolCalls:    a.toolCalls(accumulatedMessage),
							Usage:        a.usage(accumulatedMessage),
							FinishReason: a.finishReason(string(accumulatedMessage.StopReason)),
						},
					}
				}
			}

			err := anthropicStream.Err()
			if err == nil || errors.Is(err, io.EOF) {
				close(eventChan)
				return
			}
			// If there is an error, check whether the call can be retried.
			retry, after, retryErr := a.shouldRetry(attempts, err)
			if retryErr != nil {
				eventChan <- ProviderEvent{Type: EventError, Error: retryErr}
				close(eventChan)
				return
			}
			if retry {
				logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
				select {
				case <-ctx.Done():
					// Context cancelled while waiting to retry.
					if ctx.Err() != nil {
						eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
					}
					close(eventChan)
					return
				case <-time.After(time.Duration(after) * time.Millisecond):
					continue
				}
			}
			if ctx.Err() != nil {
				eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
			}

			close(eventChan)
			return
		}
	}()
	return eventChan
}

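// shouldRetry reports whether the request should be retried after err and how
// long to wait, in milliseconds. Only HTTP 429 (rate limited) and 529
// (overloaded) responses are retried, up to maxRetries attempts, honoring the
// Retry-After header when present and falling back to exponential backoff
// with jitter otherwise.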
func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, error) {
	var apierr *anthropic.Error
	if !errors.As(err, &apierr) {
		return false, 0, err
	}

	if apierr.StatusCode != 429 && apierr.StatusCode != 529 {
		return false, 0, err
	}

	if attempts > maxRetries {
		return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
	}

	// Exponential backoff with 20% jitter, overridden by the Retry-After
	// header (given in seconds) when the server provides one.
	retryMs := 0
	retryAfterValues := apierr.Response.Header.Values("Retry-After")

	backoffMs := 2000 * (1 << (attempts - 1))
	jitterMs := int(float64(backoffMs) * 0.2)
	retryMs = backoffMs + jitterMs
	if len(retryAfterValues) > 0 {
		if _, err := fmt.Sscanf(retryAfterValues[0], "%d", &retryMs); err == nil {
			retryMs = retryMs * 1000
		}
	}
	return true, int64(retryMs), nil
}

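// toolCalls extracts the tool-use blocks from an Anthropic message as
// provider-agnostic tool calls.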
func (a *anthropicClient) toolCalls(msg anthropic.Message) []message.ToolCall {
	var toolCalls []message.ToolCall

	for _, block := range msg.Content {
		switch variant := block.AsAny().(type) {
		case anthropic.ToolUseBlock:
			toolCall := message.ToolCall{
				ID:       variant.ID,
				Name:     variant.Name,
				Input:    string(variant.Input),
				Type:     string(variant.Type),
				Finished: true,
			}
			toolCalls = append(toolCalls, toolCall)
		}
	}

	return toolCalls
}

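// usage converts Anthropic token accounting, including prompt-cache creation
// and read counts, into the provider-agnostic TokenUsage.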
func (a *anthropicClient) usage(msg anthropic.Message) TokenUsage {
	return TokenUsage{
		InputTokens:         msg.Usage.InputTokens,
		OutputTokens:        msg.Usage.OutputTokens,
		CacheCreationTokens: msg.Usage.CacheCreationInputTokens,
		CacheReadTokens:     msg.Usage.CacheReadInputTokens,
	}
}

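// WithAnthropicBedrock configures the client to go through AWS Bedrock
// (using the default AWS configuration) when useBedrock is true.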
func WithAnthropicBedrock(useBedrock bool) AnthropicOption {
	return func(options *anthropicOptions) {
		options.useBedrock = useBedrock
	}
}

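// WithAnthropicDisableCache disables ephemeral prompt caching for message and
// tool blocks.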
func WithAnthropicDisableCache() AnthropicOption {
	return func(options *anthropicOptions) {
		options.disableCache = true
	}
}

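// DefaultShouldThinkFn enables extended thinking whenever the user's message
// contains "think" (case-insensitive).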
func DefaultShouldThinkFn(s string) bool {
	return strings.Contains(strings.ToLower(s), "think")
}

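// WithAnthropicShouldThinkFn overrides the predicate that decides whether a
// user message should trigger extended thinking.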
func WithAnthropicShouldThinkFn(fn func(string) bool) AnthropicOption {
	return func(options *anthropicOptions) {
		options.shouldThink = fn
	}
}