package provider

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strings"
	"time"

	"github.com/anthropics/anthropic-sdk-go"
	"github.com/anthropics/anthropic-sdk-go/bedrock"
	"github.com/anthropics/anthropic-sdk-go/option"
	"github.com/opencode-ai/opencode/internal/config"
	"github.com/opencode-ai/opencode/internal/llm/models"
	toolsPkg "github.com/opencode-ai/opencode/internal/llm/tools"
	"github.com/opencode-ai/opencode/internal/logging"
	"github.com/opencode-ai/opencode/internal/message"
)

type anthropicOptions struct {
	useBedrock   bool
	disableCache bool
	shouldThink  func(userMessage string) bool
}

type AnthropicOption func(*anthropicOptions)

type anthropicClient struct {
	providerOptions providerClientOptions
	options         anthropicOptions
	client          anthropic.Client
}

type AnthropicClient ProviderClient

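// newAnthropicClient builds an Anthropic-backed provider client from the
// shared providerClientOptions plus any Anthropic-specific options. A minimal
// construction sketch (illustrative only; how apiKey and anthropicOptions get
// populated is an assumption, not something this file decides):
//
//	opts.apiKey = apiKeyFromConfig // hypothetical source of the key
//	opts.anthropicOptions = []AnthropicOption{
//		WithAnthropicShouldThinkFn(DefaultShouldThinkFn),
//	}
//	client := newAnthropicClient(opts)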
func newAnthropicClient(opts providerClientOptions) AnthropicClient {
	anthropicOpts := anthropicOptions{}
	for _, o := range opts.anthropicOptions {
		o(&anthropicOpts)
	}

	anthropicClientOptions := []option.RequestOption{}
	if opts.apiKey != "" {
		anthropicClientOptions = append(anthropicClientOptions, option.WithAPIKey(opts.apiKey))
	}
	if anthropicOpts.useBedrock {
		anthropicClientOptions = append(anthropicClientOptions, bedrock.WithLoadDefaultConfig(context.Background()))
	}

	client := anthropic.NewClient(anthropicClientOptions...)
	return &anthropicClient{
		providerOptions: opts,
		options:         anthropicOpts,
		client:          client,
	}
}

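// convertMessages maps the internal message history onto Anthropic message
// params: user text and attached images become user content blocks, assistant
// text and tool calls become assistant blocks, and tool results are sent back
// as a user message of tool_result blocks. The last two messages (those with
// i > len(messages)-3) are tagged with ephemeral cache-control markers unless
// caching is disabled; with five messages, for example, only indices 3 and 4
// are tagged.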
func (a *anthropicClient) convertMessages(messages []message.Message) (anthropicMessages []anthropic.MessageParam) {
	for i, msg := range messages {
		cache := false
		if i > len(messages)-3 {
			cache = true
		}
		switch msg.Role {
		case message.User:
			content := anthropic.NewTextBlock(msg.Content().String())
			if cache && !a.options.disableCache {
				content.OfText.CacheControl = anthropic.CacheControlEphemeralParam{
					Type: "ephemeral",
				}
			}
			var contentBlocks []anthropic.ContentBlockParamUnion
			contentBlocks = append(contentBlocks, content)
			for _, binaryContent := range msg.BinaryContent() {
				base64Image := binaryContent.String(models.ProviderAnthropic)
				imageBlock := anthropic.NewImageBlockBase64(binaryContent.MIMEType, base64Image)
				contentBlocks = append(contentBlocks, imageBlock)
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(contentBlocks...))

		case message.Assistant:
			blocks := []anthropic.ContentBlockParamUnion{}
			if msg.Content().String() != "" {
				content := anthropic.NewTextBlock(msg.Content().String())
				if cache && !a.options.disableCache {
					content.OfText.CacheControl = anthropic.CacheControlEphemeralParam{
						Type: "ephemeral",
					}
				}
				blocks = append(blocks, content)
			}

			for _, toolCall := range msg.ToolCalls() {
				var inputMap map[string]any
				err := json.Unmarshal([]byte(toolCall.Input), &inputMap)
				if err != nil {
					continue
				}
				blocks = append(blocks, anthropic.NewToolUseBlock(toolCall.ID, inputMap, toolCall.Name))
			}

			if len(blocks) == 0 {
				logging.Warn("Assistant message has no content blocks; skipping (this should not happen)")
				continue
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewAssistantMessage(blocks...))

		case message.Tool:
			results := make([]anthropic.ContentBlockParamUnion, len(msg.ToolResults()))
			for i, toolResult := range msg.ToolResults() {
				results[i] = anthropic.NewToolResultBlock(toolResult.ToolCallID, toolResult.Content, toolResult.IsError)
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(results...))
		}
	}
	return
}

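// convertTools maps the tool definitions to Anthropic tool params. When
// caching is enabled, only the final tool carries a cache-control marker,
// placing a single cache breakpoint after the full tool list.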
func (a *anthropicClient) convertTools(tools []toolsPkg.BaseTool) []anthropic.ToolUnionParam {
	anthropicTools := make([]anthropic.ToolUnionParam, len(tools))

	for i, tool := range tools {
		info := tool.Info()
		toolParam := anthropic.ToolParam{
			Name:        info.Name,
			Description: anthropic.String(info.Description),
			InputSchema: anthropic.ToolInputSchemaParam{
				Properties: info.Parameters,
				// TODO: figure out how to tell Claude which fields are required.
			},
		}

		if i == len(tools)-1 && !a.options.disableCache {
			toolParam.CacheControl = anthropic.CacheControlEphemeralParam{
				Type: "ephemeral",
			}
		}

		anthropicTools[i] = anthropic.ToolUnionParam{OfTool: &toolParam}
	}

	return anthropicTools
}

func (a *anthropicClient) finishReason(reason string) message.FinishReason {
	switch reason {
	case "end_turn":
		return message.FinishReasonEndTurn
	case "max_tokens":
		return message.FinishReasonMaxTokens
	case "tool_use":
		return message.FinishReasonToolUse
	case "stop_sequence":
		return message.FinishReasonEndTurn
	default:
		return message.FinishReasonUnknown
	}
}

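// preparedMessages assembles the request parameters. When the latest message
// is from the user and shouldThink returns true for its text, extended
// thinking is enabled with a budget of 80% of maxTokens (e.g. 4096 -> 3276)
// and the temperature is raised to 1; otherwise temperature stays at 0. The
// system prompt is always sent with an ephemeral cache-control marker.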
func (a *anthropicClient) preparedMessages(messages []anthropic.MessageParam, tools []anthropic.ToolUnionParam) anthropic.MessageNewParams {
	var thinkingParam anthropic.ThinkingConfigParamUnion
	lastMessage := messages[len(messages)-1]
	isUser := lastMessage.Role == anthropic.MessageParamRoleUser
	messageContent := ""
	temperature := anthropic.Float(0)
	if isUser {
		for _, m := range lastMessage.Content {
			if m.OfText != nil && m.OfText.Text != "" {
				messageContent = m.OfText.Text
			}
		}
		if messageContent != "" && a.options.shouldThink != nil && a.options.shouldThink(messageContent) {
			thinkingParam = anthropic.ThinkingConfigParamOfEnabled(int64(float64(a.providerOptions.maxTokens) * 0.8))
			temperature = anthropic.Float(1)
		}
	}

	return anthropic.MessageNewParams{
		Model:       anthropic.Model(a.providerOptions.model.APIModel),
		MaxTokens:   a.providerOptions.maxTokens,
		Temperature: temperature,
		Messages:    messages,
		Tools:       tools,
		Thinking:    thinkingParam,
		System: []anthropic.TextBlockParam{
			{
				Text: a.providerOptions.systemMessage,
				CacheControl: anthropic.CacheControlEphemeralParam{
					Type: "ephemeral",
				},
			},
		},
	}
}

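// send performs a single non-streaming Messages.New call, retrying via
// shouldRetry on rate-limit (429) or overloaded (529) responses, and returns
// the concatenated text content together with tool calls and token usage.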
func (a *anthropicClient) send(ctx context.Context, messages []message.Message, tools []toolsPkg.BaseTool) (response *ProviderResponse, err error) {
	preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
	cfg := config.Get()
	if cfg.Debug {
		jsonData, _ := json.Marshal(preparedMessages)
		logging.Debug("Prepared messages", "messages", string(jsonData))
	}

	attempts := 0
	for {
		attempts++
		anthropicResponse, err := a.client.Messages.New(
			ctx,
			preparedMessages,
		)
		// If the call failed, check whether it can be retried.
		if err != nil {
			logging.Error("Error in Anthropic API call", "error", err)
			retry, after, retryErr := a.shouldRetry(attempts, err)
			if retryErr != nil {
				return nil, retryErr
			}
			if retry {
				logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
				select {
				case <-ctx.Done():
					return nil, ctx.Err()
				case <-time.After(time.Duration(after) * time.Millisecond):
					continue
				}
			}
			return nil, err
		}

		content := ""
		for _, block := range anthropicResponse.Content {
			if text, ok := block.AsAny().(anthropic.TextBlock); ok {
				content += text.Text
			}
		}

		return &ProviderResponse{
			Content:   content,
			ToolCalls: a.toolCalls(*anthropicResponse),
			Usage:     a.usage(*anthropicResponse),
		}, nil
	}
}

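// stream issues a streaming Messages request and forwards SDK events on the
// returned channel as ProviderEvents; the channel is closed once the stream
// completes or a non-retryable error has been emitted. A consumer sketch
// (the client, msgs, and tools values are assumed to exist elsewhere):
//
//	for event := range client.stream(ctx, msgs, tools) {
//		switch event.Type {
//		case EventContentDelta:
//			// append event.Content to the in-progress response
//		case EventError:
//			// surface event.Error; the channel is closed after this
//		}
//	}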
func (a *anthropicClient) stream(ctx context.Context, messages []message.Message, tools []toolsPkg.BaseTool) <-chan ProviderEvent {
	preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
	cfg := config.Get()

	var sessionId string
	requestSeqId := (len(messages) + 1) / 2
	if cfg.Debug {
		if sid, ok := ctx.Value(toolsPkg.SessionIDContextKey).(string); ok {
			sessionId = sid
		}
		jsonData, _ := json.Marshal(preparedMessages)
		if sessionId != "" {
			filepath := logging.WriteRequestMessageJson(sessionId, requestSeqId, preparedMessages)
			logging.Debug("Prepared messages", "filepath", filepath)
		} else {
			logging.Debug("Prepared messages", "messages", string(jsonData))
		}
	}
	attempts := 0
	eventChan := make(chan ProviderEvent)
	go func() {
		for {
			attempts++
			anthropicStream := a.client.Messages.NewStreaming(
				ctx,
				preparedMessages,
			)
			accumulatedMessage := anthropic.Message{}

			currentToolCallID := ""
			for anthropicStream.Next() {
				event := anthropicStream.Current()
				err := accumulatedMessage.Accumulate(event)
				if err != nil {
					logging.Warn("Error accumulating message", "error", err)
					continue
				}

				switch event := event.AsAny().(type) {
				case anthropic.ContentBlockStartEvent:
					if event.ContentBlock.Type == "text" {
						eventChan <- ProviderEvent{Type: EventContentStart}
					} else if event.ContentBlock.Type == "tool_use" {
						currentToolCallID = event.ContentBlock.ID
						eventChan <- ProviderEvent{
							Type: EventToolUseStart,
							ToolCall: &message.ToolCall{
								ID:       event.ContentBlock.ID,
								Name:     event.ContentBlock.Name,
								Finished: false,
							},
						}
					}

				case anthropic.ContentBlockDeltaEvent:
					if event.Delta.Type == "thinking_delta" && event.Delta.Thinking != "" {
						eventChan <- ProviderEvent{
							Type:     EventThinkingDelta,
							Thinking: event.Delta.Thinking,
						}
					} else if event.Delta.Type == "text_delta" && event.Delta.Text != "" {
						eventChan <- ProviderEvent{
							Type:    EventContentDelta,
							Content: event.Delta.Text,
						}
					} else if event.Delta.Type == "input_json_delta" {
						if currentToolCallID != "" {
							eventChan <- ProviderEvent{
								Type: EventToolUseDelta,
								ToolCall: &message.ToolCall{
									ID:       currentToolCallID,
									Finished: false,
									Input:    event.Delta.JSON.PartialJSON.Raw(),
								},
							}
						}
					}
				case anthropic.ContentBlockStopEvent:
					if currentToolCallID != "" {
						eventChan <- ProviderEvent{
							Type: EventToolUseStop,
							ToolCall: &message.ToolCall{
								ID: currentToolCallID,
							},
						}
						currentToolCallID = ""
					} else {
						eventChan <- ProviderEvent{Type: EventContentStop}
					}

				case anthropic.MessageStopEvent:
					content := ""
					for _, block := range accumulatedMessage.Content {
						if text, ok := block.AsAny().(anthropic.TextBlock); ok {
							content += text.Text
						}
					}

					eventChan <- ProviderEvent{
						Type: EventComplete,
						Response: &ProviderResponse{
							Content:      content,
							ToolCalls:    a.toolCalls(accumulatedMessage),
							Usage:        a.usage(accumulatedMessage),
							FinishReason: a.finishReason(string(accumulatedMessage.StopReason)),
						},
					}
				}
			}

			err := anthropicStream.Err()
			if err == nil || errors.Is(err, io.EOF) {
				close(eventChan)
				return
			}
			// If there was an error, check whether the call can be retried.
			retry, after, retryErr := a.shouldRetry(attempts, err)
			if retryErr != nil {
				eventChan <- ProviderEvent{Type: EventError, Error: retryErr}
				close(eventChan)
				return
			}
			if retry {
				logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
				select {
				case <-ctx.Done():
					// context cancelled
					if ctx.Err() != nil {
						eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
					}
					close(eventChan)
					return
				case <-time.After(time.Duration(after) * time.Millisecond):
					continue
				}
			}
			if ctx.Err() != nil {
				eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
			}

			close(eventChan)
			return
		}
	}()
	return eventChan
}

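// shouldRetry reports whether an API error is retryable (HTTP 429 or 529) and
// how long to wait, giving up after maxRetries attempts. The default wait is
// exponential backoff with a fixed 20% jitter: attempt 1 waits 2000+400 =
// 2400 ms, attempt 2 waits 4000+800 = 4800 ms, attempt 3 waits 8000+1600 =
// 9600 ms. A Retry-After header, when present, overrides the backoff (its
// value is in seconds and is converted to milliseconds).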
func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, error) {
	var apierr *anthropic.Error
	if !errors.As(err, &apierr) {
		return false, 0, err
	}

	if apierr.StatusCode != 429 && apierr.StatusCode != 529 {
		return false, 0, err
	}

	if attempts > maxRetries {
		return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
	}

	// Default to exponential backoff with a fixed 20% jitter, but prefer the
	// server-provided Retry-After header (in seconds) when it is present.
	backoffMs := 2000 * (1 << (attempts - 1))
	jitterMs := int(float64(backoffMs) * 0.2)
	retryMs := backoffMs + jitterMs
	if retryAfterValues := apierr.Response.Header.Values("Retry-After"); len(retryAfterValues) > 0 {
		if _, err := fmt.Sscanf(retryAfterValues[0], "%d", &retryMs); err == nil {
			retryMs *= 1000
		}
	}
	return true, int64(retryMs), nil
}

func (a *anthropicClient) toolCalls(msg anthropic.Message) []message.ToolCall {
	var toolCalls []message.ToolCall

	for _, block := range msg.Content {
		switch variant := block.AsAny().(type) {
		case anthropic.ToolUseBlock:
			toolCall := message.ToolCall{
				ID:       variant.ID,
				Name:     variant.Name,
				Input:    string(variant.Input),
				Type:     string(variant.Type),
				Finished: true,
			}
			toolCalls = append(toolCalls, toolCall)
		}
	}

	return toolCalls
}

func (a *anthropicClient) usage(msg anthropic.Message) TokenUsage {
	return TokenUsage{
		InputTokens:         msg.Usage.InputTokens,
		OutputTokens:        msg.Usage.OutputTokens,
		CacheCreationTokens: msg.Usage.CacheCreationInputTokens,
		CacheReadTokens:     msg.Usage.CacheReadInputTokens,
	}
}

func WithAnthropicBedrock(useBedrock bool) AnthropicOption {
	return func(options *anthropicOptions) {
		options.useBedrock = useBedrock
	}
}

func WithAnthropicDisableCache() AnthropicOption {
	return func(options *anthropicOptions) {
		options.disableCache = true
	}
}

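// DefaultShouldThinkFn enables extended thinking whenever the user message
// mentions "think" (case-insensitive): "Think through the tradeoffs" returns
// true, "Summarize this file" returns false.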
func DefaultShouldThinkFn(s string) bool {
	return strings.Contains(strings.ToLower(s), "think")
}

func WithAnthropicShouldThinkFn(fn func(string) bool) AnthropicOption {
	return func(options *anthropicOptions) {
		options.shouldThink = fn
	}
}