package provider

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strings"
	"time"

	"github.com/anthropics/anthropic-sdk-go"
	"github.com/anthropics/anthropic-sdk-go/bedrock"
	"github.com/anthropics/anthropic-sdk-go/option"
	"github.com/opencode-ai/opencode/internal/config"
	"github.com/opencode-ai/opencode/internal/llm/tools"
	"github.com/opencode-ai/opencode/internal/logging"
	"github.com/opencode-ai/opencode/internal/message"
)

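// anthropicOptions holds Anthropic-specific configuration: Bedrock routing,
// prompt-cache behavior, and the heuristic that enables extended thinking.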
type anthropicOptions struct {
	useBedrock   bool
	disableCache bool
	shouldThink  func(userMessage string) bool
}

type AnthropicOption func(*anthropicOptions)

type anthropicClient struct {
	providerOptions providerClientOptions
	options         anthropicOptions
	client          anthropic.Client
}

type AnthropicClient ProviderClient

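// newAnthropicClient builds an Anthropic-backed provider client, applying any
// AnthropicOption overrides and configuring API-key or Bedrock authentication.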
func newAnthropicClient(opts providerClientOptions) AnthropicClient {
	anthropicOpts := anthropicOptions{}
	for _, o := range opts.anthropicOptions {
		o(&anthropicOpts)
	}

	anthropicClientOptions := []option.RequestOption{}
	if opts.apiKey != "" {
		anthropicClientOptions = append(anthropicClientOptions, option.WithAPIKey(opts.apiKey))
	}
	if anthropicOpts.useBedrock {
		anthropicClientOptions = append(anthropicClientOptions, bedrock.WithLoadDefaultConfig(context.Background()))
	}

	client := anthropic.NewClient(anthropicClientOptions...)
	return &anthropicClient{
		providerOptions: opts,
		options:         anthropicOpts,
		client:          client,
	}
}

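// convertMessages translates internal message history into Anthropic message
// params, attaching ephemeral cache control to the most recent messages unless
// caching is disabled.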
func (a *anthropicClient) convertMessages(messages []message.Message) (anthropicMessages []anthropic.MessageParam) {
	for i, msg := range messages {
		cache := false
		if i > len(messages)-3 {
			cache = true
		}
		switch msg.Role {
		case message.User:
			content := anthropic.NewTextBlock(msg.Content().String())
			if cache && !a.options.disableCache {
				content.OfRequestTextBlock.CacheControl = anthropic.CacheControlEphemeralParam{
					Type: "ephemeral",
				}
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(content))

		case message.Assistant:
			blocks := []anthropic.ContentBlockParamUnion{}
			if msg.Content().String() != "" {
				content := anthropic.NewTextBlock(msg.Content().String())
				if cache && !a.options.disableCache {
					content.OfRequestTextBlock.CacheControl = anthropic.CacheControlEphemeralParam{
						Type: "ephemeral",
					}
				}
				blocks = append(blocks, content)
			}

			for _, toolCall := range msg.ToolCalls() {
				var inputMap map[string]any
				err := json.Unmarshal([]byte(toolCall.Input), &inputMap)
				if err != nil {
					continue
				}
				blocks = append(blocks, anthropic.ContentBlockParamOfRequestToolUseBlock(toolCall.ID, inputMap, toolCall.Name))
			}

			if len(blocks) == 0 {
				logging.Warn("Assistant message has no content blocks; this should not happen")
				continue
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewAssistantMessage(blocks...))

		case message.Tool:
			results := make([]anthropic.ContentBlockParamUnion, len(msg.ToolResults()))
			for i, toolResult := range msg.ToolResults() {
				results[i] = anthropic.NewToolResultBlock(toolResult.ToolCallID, toolResult.Content, toolResult.IsError)
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(results...))
		}
	}
	return
}

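// convertTools maps the registered tools to Anthropic tool params, marking the
// last tool with ephemeral cache control so the tool definitions can be cached.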
func (a *anthropicClient) convertTools(tools []tools.BaseTool) []anthropic.ToolUnionParam {
	anthropicTools := make([]anthropic.ToolUnionParam, len(tools))

	for i, tool := range tools {
		info := tool.Info()
		toolParam := anthropic.ToolParam{
			Name:        info.Name,
			Description: anthropic.String(info.Description),
			InputSchema: anthropic.ToolInputSchemaParam{
				Properties: info.Parameters,
				// TODO: figure out how we can tell claude the required fields?
			},
		}

		if i == len(tools)-1 && !a.options.disableCache {
			toolParam.CacheControl = anthropic.CacheControlEphemeralParam{
				Type: "ephemeral",
			}
		}

		anthropicTools[i] = anthropic.ToolUnionParam{OfTool: &toolParam}
	}

	return anthropicTools
}

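// finishReason maps Anthropic stop reasons onto the provider-agnostic
// message.FinishReason values.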
func (a *anthropicClient) finishReason(reason string) message.FinishReason {
	switch reason {
	case "end_turn":
		return message.FinishReasonEndTurn
	case "max_tokens":
		return message.FinishReasonMaxTokens
	case "tool_use":
		return message.FinishReasonToolUse
	case "stop_sequence":
		return message.FinishReasonEndTurn
	default:
		return message.FinishReasonUnknown
	}
}

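// preparedMessages assembles the request params: model, token budget, cached
// system prompt, tools, and, when the last user message asks for it, extended
// thinking with 80% of the max-token budget.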
func (a *anthropicClient) preparedMessages(messages []anthropic.MessageParam, tools []anthropic.ToolUnionParam) anthropic.MessageNewParams {
	var thinkingParam anthropic.ThinkingConfigParamUnion
	lastMessage := messages[len(messages)-1]
	isUser := lastMessage.Role == anthropic.MessageParamRoleUser
	messageContent := ""
	temperature := anthropic.Float(0)
	if isUser {
		for _, m := range lastMessage.Content {
			if m.OfRequestTextBlock != nil && m.OfRequestTextBlock.Text != "" {
				messageContent = m.OfRequestTextBlock.Text
			}
		}
		if messageContent != "" && a.options.shouldThink != nil && a.options.shouldThink(messageContent) {
			thinkingParam = anthropic.ThinkingConfigParamUnion{
				OfThinkingConfigEnabled: &anthropic.ThinkingConfigEnabledParam{
					BudgetTokens: int64(float64(a.providerOptions.maxTokens) * 0.8),
					Type:         "enabled",
				},
			}
			temperature = anthropic.Float(1)
		}
	}

	return anthropic.MessageNewParams{
		Model:       anthropic.Model(a.providerOptions.model.APIModel),
		MaxTokens:   a.providerOptions.maxTokens,
		Temperature: temperature,
		Messages:    messages,
		Tools:       tools,
		Thinking:    thinkingParam,
		System: []anthropic.TextBlockParam{
			{
				Text: a.providerOptions.systemMessage,
				CacheControl: anthropic.CacheControlEphemeralParam{
					Type: "ephemeral",
				},
			},
		},
	}
}

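// send issues a single (non-streaming) Messages.New request, retrying on rate
// limits and returning the text content, tool calls, and token usage.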
func (a *anthropicClient) send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (response *ProviderResponse, err error) {
	preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
	cfg := config.Get()
	if cfg.Debug {
		// jsonData, _ := json.Marshal(preparedMessages)
		// logging.Debug("Prepared messages", "messages", string(jsonData))
	}
	attempts := 0
	for {
		attempts++
		anthropicResponse, err := a.client.Messages.New(
			ctx,
			preparedMessages,
		)
		// If there is an error, check whether we can retry the call
		if err != nil {
			retry, after, retryErr := a.shouldRetry(attempts, err)
			if retryErr != nil {
				return nil, retryErr
			}
			if retry {
				logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
				select {
				case <-ctx.Done():
					return nil, ctx.Err()
				case <-time.After(time.Duration(after) * time.Millisecond):
					continue
				}
			}
			return nil, err
		}

		content := ""
		for _, block := range anthropicResponse.Content {
			if text, ok := block.AsAny().(anthropic.TextBlock); ok {
				content += text.Text
			}
		}

		return &ProviderResponse{
			Content:   content,
			ToolCalls: a.toolCalls(*anthropicResponse),
			Usage:     a.usage(*anthropicResponse),
		}, nil
	}
}

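// stream issues a streaming Messages request and forwards content, thinking,
// and tool-use deltas on the returned channel, retrying on rate limits and
// closing the channel when the stream ends or fails.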
func (a *anthropicClient) stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
	preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
	cfg := config.Get()
	if cfg.Debug {
		// jsonData, _ := json.Marshal(preparedMessages)
		// logging.Debug("Prepared messages", "messages", string(jsonData))
	}
	attempts := 0
	eventChan := make(chan ProviderEvent)
	go func() {
		for {
			attempts++
			anthropicStream := a.client.Messages.NewStreaming(
				ctx,
				preparedMessages,
			)
			accumulatedMessage := anthropic.Message{}

			currentToolCallID := ""
			for anthropicStream.Next() {
				event := anthropicStream.Current()
				err := accumulatedMessage.Accumulate(event)
				if err != nil {
					logging.Warn("Error accumulating message", "error", err)
					continue
				}

				switch event := event.AsAny().(type) {
				case anthropic.ContentBlockStartEvent:
					if event.ContentBlock.Type == "text" {
						eventChan <- ProviderEvent{Type: EventContentStart}
					} else if event.ContentBlock.Type == "tool_use" {
						currentToolCallID = event.ContentBlock.ID
						eventChan <- ProviderEvent{
							Type: EventToolUseStart,
							ToolCall: &message.ToolCall{
								ID:       event.ContentBlock.ID,
								Name:     event.ContentBlock.Name,
								Finished: false,
							},
						}
					}

				case anthropic.ContentBlockDeltaEvent:
					if event.Delta.Type == "thinking_delta" && event.Delta.Thinking != "" {
						eventChan <- ProviderEvent{
							Type:     EventThinkingDelta,
							Thinking: event.Delta.Thinking,
						}
					} else if event.Delta.Type == "text_delta" && event.Delta.Text != "" {
						eventChan <- ProviderEvent{
							Type:    EventContentDelta,
							Content: event.Delta.Text,
						}
					} else if event.Delta.Type == "input_json_delta" {
						if currentToolCallID != "" {
							eventChan <- ProviderEvent{
								Type: EventToolUseDelta,
								ToolCall: &message.ToolCall{
									ID:       currentToolCallID,
									Finished: false,
									Input:    event.Delta.JSON.PartialJSON.Raw(),
								},
							}
						}
					}
				case anthropic.ContentBlockStopEvent:
					if currentToolCallID != "" {
						eventChan <- ProviderEvent{
							Type: EventToolUseStop,
							ToolCall: &message.ToolCall{
								ID: currentToolCallID,
							},
						}
						currentToolCallID = ""
					} else {
						eventChan <- ProviderEvent{Type: EventContentStop}
					}

				case anthropic.MessageStopEvent:
					content := ""
					for _, block := range accumulatedMessage.Content {
						if text, ok := block.AsAny().(anthropic.TextBlock); ok {
							content += text.Text
						}
					}

					eventChan <- ProviderEvent{
						Type: EventComplete,
						Response: &ProviderResponse{
							Content:      content,
							ToolCalls:    a.toolCalls(accumulatedMessage),
							Usage:        a.usage(accumulatedMessage),
							FinishReason: a.finishReason(string(accumulatedMessage.StopReason)),
						},
					}
				}
			}

			err := anthropicStream.Err()
			if err == nil || errors.Is(err, io.EOF) {
				close(eventChan)
				return
			}
			// If there is an error, check whether we can retry the call
			retry, after, retryErr := a.shouldRetry(attempts, err)
			if retryErr != nil {
				eventChan <- ProviderEvent{Type: EventError, Error: retryErr}
				close(eventChan)
				return
			}
			if retry {
				logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
				select {
				case <-ctx.Done():
					// context cancelled
					if ctx.Err() != nil {
						eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
					}
					close(eventChan)
					return
				case <-time.After(time.Duration(after) * time.Millisecond):
					continue
				}
			}
			if ctx.Err() != nil {
				eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
			}

			close(eventChan)
			return
		}
	}()
	return eventChan
}

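// shouldRetry reports whether a failed request should be retried. Only rate
// limit (429) and overloaded (529) responses are retried, honoring the
// Retry-After header when present and otherwise using exponential backoff.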
func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, error) {
	var apierr *anthropic.Error
	if !errors.As(err, &apierr) {
		return false, 0, err
	}

	if apierr.StatusCode != 429 && apierr.StatusCode != 529 {
		return false, 0, err
	}

	if attempts > maxRetries {
		return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
	}

	retryMs := 0
	retryAfterValues := apierr.Response.Header.Values("Retry-After")

	backoffMs := 2000 * (1 << (attempts - 1))
	jitterMs := int(float64(backoffMs) * 0.2)
	retryMs = backoffMs + jitterMs
	if len(retryAfterValues) > 0 {
		if _, err := fmt.Sscanf(retryAfterValues[0], "%d", &retryMs); err == nil {
			retryMs = retryMs * 1000
		}
	}
	return true, int64(retryMs), nil
}

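// toolCalls extracts tool-use blocks from a completed Anthropic message and
// converts them to internal ToolCall values.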
func (a *anthropicClient) toolCalls(msg anthropic.Message) []message.ToolCall {
	var toolCalls []message.ToolCall

	for _, block := range msg.Content {
		switch variant := block.AsAny().(type) {
		case anthropic.ToolUseBlock:
			toolCall := message.ToolCall{
				ID:       variant.ID,
				Name:     variant.Name,
				Input:    string(variant.Input),
				Type:     string(variant.Type),
				Finished: true,
			}
			toolCalls = append(toolCalls, toolCall)
		}
	}

	return toolCalls
}

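// usage converts the Anthropic usage block, including cache creation and cache
// read counts, into the provider-agnostic TokenUsage struct.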
func (a *anthropicClient) usage(msg anthropic.Message) TokenUsage {
	return TokenUsage{
		InputTokens:         msg.Usage.InputTokens,
		OutputTokens:        msg.Usage.OutputTokens,
		CacheCreationTokens: msg.Usage.CacheCreationInputTokens,
		CacheReadTokens:     msg.Usage.CacheReadInputTokens,
	}
}

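// WithAnthropicBedrock routes requests through AWS Bedrock instead of the
// Anthropic API when useBedrock is true.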
func WithAnthropicBedrock(useBedrock bool) AnthropicOption {
	return func(options *anthropicOptions) {
		options.useBedrock = useBedrock
	}
}

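// WithAnthropicDisableCache turns off ephemeral prompt caching for message and
// tool blocks.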
func WithAnthropicDisableCache() AnthropicOption {
	return func(options *anthropicOptions) {
		options.disableCache = true
	}
}

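// DefaultShouldThinkFn enables extended thinking whenever the user's message
// contains the word "think" (case-insensitive).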
func DefaultShouldThinkFn(s string) bool {
	return strings.Contains(strings.ToLower(s), "think")
}

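// WithAnthropicShouldThinkFn overrides the heuristic used to decide whether a
// user message should trigger extended thinking.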
func WithAnthropicShouldThinkFn(fn func(string) bool) AnthropicOption {
	return func(options *anthropicOptions) {
		options.shouldThink = fn
	}
}