package provider

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strings"
	"time"

	"github.com/anthropics/anthropic-sdk-go"
	"github.com/anthropics/anthropic-sdk-go/bedrock"
	"github.com/anthropics/anthropic-sdk-go/option"
	"github.com/charmbracelet/crush/internal/config"
	"github.com/charmbracelet/crush/internal/fur/provider"
	"github.com/charmbracelet/crush/internal/llm/tools"
	"github.com/charmbracelet/crush/internal/logging"
	"github.com/charmbracelet/crush/internal/message"
)

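// anthropicClient implements ProviderClient against the Anthropic Messages
// API, optionally routed through AWS Bedrock.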
type anthropicClient struct {
	providerOptions providerClientOptions
	useBedrock      bool
	client          anthropic.Client
}

type AnthropicClient ProviderClient

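// newAnthropicClient builds an Anthropic-backed ProviderClient; when
// useBedrock is true, requests are routed through AWS Bedrock using the
// default AWS configuration.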
func newAnthropicClient(opts providerClientOptions, useBedrock bool) AnthropicClient {
	return &anthropicClient{
		providerOptions: opts,
		useBedrock:      useBedrock,
		client:          createAnthropicClient(opts, useBedrock),
	}
}

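// createAnthropicClient assembles the SDK request options: the API key when
// one is configured, plus the Bedrock integration when requested.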
func createAnthropicClient(opts providerClientOptions, useBedrock bool) anthropic.Client {
	anthropicClientOptions := []option.RequestOption{}
	if opts.apiKey != "" {
		anthropicClientOptions = append(anthropicClientOptions, option.WithAPIKey(opts.apiKey))
	}
	if useBedrock {
		anthropicClientOptions = append(anthropicClientOptions, bedrock.WithLoadDefaultConfig(context.Background()))
	}
	return anthropic.NewClient(anthropicClientOptions...)
}

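// convertMessages maps internal messages onto Anthropic message params. The
// last two messages are marked with an ephemeral cache-control block (unless
// caching is disabled) so the prompt prefix can be reused across requests.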
func (a *anthropicClient) convertMessages(messages []message.Message) (anthropicMessages []anthropic.MessageParam) {
	for i, msg := range messages {
		// Only the last two messages are cache candidates.
		cache := i > len(messages)-3
		switch msg.Role {
		case message.User:
			content := anthropic.NewTextBlock(msg.Content().String())
			if cache && !a.providerOptions.disableCache {
				content.OfText.CacheControl = anthropic.CacheControlEphemeralParam{
					Type: "ephemeral",
				}
			}
			var contentBlocks []anthropic.ContentBlockParamUnion
			contentBlocks = append(contentBlocks, content)
			for _, binaryContent := range msg.BinaryContent() {
				base64Image := binaryContent.String(provider.InferenceProviderAnthropic)
				imageBlock := anthropic.NewImageBlockBase64(binaryContent.MIMEType, base64Image)
				contentBlocks = append(contentBlocks, imageBlock)
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(contentBlocks...))

		case message.Assistant:
			blocks := []anthropic.ContentBlockParamUnion{}
			if msg.Content().String() != "" {
				content := anthropic.NewTextBlock(msg.Content().String())
				if cache && !a.providerOptions.disableCache {
					content.OfText.CacheControl = anthropic.CacheControlEphemeralParam{
						Type: "ephemeral",
					}
				}
				blocks = append(blocks, content)
			}

			for _, toolCall := range msg.ToolCalls() {
				var inputMap map[string]any
				err := json.Unmarshal([]byte(toolCall.Input), &inputMap)
				if err != nil {
					continue
				}
				blocks = append(blocks, anthropic.NewToolUseBlock(toolCall.ID, inputMap, toolCall.Name))
			}

			if len(blocks) == 0 {
				logging.Warn("Assistant message has no content blocks; skipping (this should not happen)")
				continue
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewAssistantMessage(blocks...))

		case message.Tool:
			// Tool results are sent back to Anthropic as a user message.
			results := make([]anthropic.ContentBlockParamUnion, len(msg.ToolResults()))
			for i, toolResult := range msg.ToolResults() {
				results[i] = anthropic.NewToolResultBlock(toolResult.ToolCallID, toolResult.Content, toolResult.IsError)
			}
			anthropicMessages = append(anthropicMessages, anthropic.NewUserMessage(results...))
		}
	}
	return
}

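// convertTools converts tool definitions into Anthropic tool params. The last
// tool carries an ephemeral cache-control block so the tool list is cached
// together with the system prompt.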
func (a *anthropicClient) convertTools(tools []tools.BaseTool) []anthropic.ToolUnionParam {
	anthropicTools := make([]anthropic.ToolUnionParam, len(tools))

	for i, tool := range tools {
		info := tool.Info()
		toolParam := anthropic.ToolParam{
			Name:        info.Name,
			Description: anthropic.String(info.Description),
			InputSchema: anthropic.ToolInputSchemaParam{
				Properties: info.Parameters,
				// TODO: figure out how to tell Claude which fields are required.
			},
		}

		if i == len(tools)-1 && !a.providerOptions.disableCache {
			toolParam.CacheControl = anthropic.CacheControlEphemeralParam{
				Type: "ephemeral",
			}
		}

		anthropicTools[i] = anthropic.ToolUnionParam{OfTool: &toolParam}
	}

	return anthropicTools
}

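// finishReason maps Anthropic stop reasons to internal finish reasons;
// stop_sequence is treated the same as a normal end of turn.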
func (a *anthropicClient) finishReason(reason string) message.FinishReason {
	switch reason {
	case "end_turn":
		return message.FinishReasonEndTurn
	case "max_tokens":
		return message.FinishReasonMaxTokens
	case "tool_use":
		return message.FinishReasonToolUse
	case "stop_sequence":
		return message.FinishReasonEndTurn
	default:
		return message.FinishReasonUnknown
	}
}

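// preparedMessages assembles the request parameters: model, max tokens (model
// default, overridden by the per-model config and then by provider options),
// temperature, tools, thinking config (currently left unset, see the TODO),
// and a cached system prompt.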
func (a *anthropicClient) preparedMessages(messages []anthropic.MessageParam, tools []anthropic.ToolUnionParam) anthropic.MessageNewParams {
	model := a.providerOptions.model(a.providerOptions.modelType)
	var thinkingParam anthropic.ThinkingConfigParamUnion
	temperature := anthropic.Float(0)
	// TODO: Implement a proper thinking function
	// lastMessage := messages[len(messages)-1]
	// isUser := lastMessage.Role == anthropic.MessageParamRoleUser
	// messageContent := ""
	// if isUser {
	// 	for _, m := range lastMessage.Content {
	// 		if m.OfText != nil && m.OfText.Text != "" {
	// 			messageContent = m.OfText.Text
	// 		}
	// 	}
	// 	if messageContent != "" && a.providerOptions.shouldThink != nil && a.providerOptions.shouldThink(messageContent) {
	// 		thinkingParam = anthropic.ThinkingConfigParamOfEnabled(int64(float64(a.providerOptions.maxTokens) * 0.8))
	// 		temperature = anthropic.Float(1)
	// 	}
	// }

	cfg := config.Get()
	modelConfig := cfg.Models.Large
	if a.providerOptions.modelType == config.SmallModel {
		modelConfig = cfg.Models.Small
	}
	maxTokens := model.DefaultMaxTokens
	if modelConfig.MaxTokens > 0 {
		maxTokens = modelConfig.MaxTokens
	}

	// Override max tokens if set in provider options.
	if a.providerOptions.maxTokens > 0 {
		maxTokens = a.providerOptions.maxTokens
	}

	return anthropic.MessageNewParams{
		Model:       anthropic.Model(model.ID),
		MaxTokens:   maxTokens,
		Temperature: temperature,
		Messages:    messages,
		Tools:       tools,
		Thinking:    thinkingParam,
		System: []anthropic.TextBlockParam{
			{
				Text: a.providerOptions.systemMessage,
				CacheControl: anthropic.CacheControlEphemeralParam{
					Type: "ephemeral",
				},
			},
		},
	}
}

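// send performs a single non-streaming Messages.New call, retrying with
// backoff when shouldRetry reports the error as retryable.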
func (a *anthropicClient) send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (response *ProviderResponse, err error) {
	preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
	cfg := config.Get()
	if cfg.Options.Debug {
		jsonData, _ := json.Marshal(preparedMessages)
		logging.Debug("Prepared messages", "messages", string(jsonData))
	}

	attempts := 0
	for {
		attempts++
		anthropicResponse, err := a.client.Messages.New(
			ctx,
			preparedMessages,
		)
		// On error, check whether the call can be retried.
		if err != nil {
			logging.Error("Error in Anthropic API call", "error", err)
			retry, after, retryErr := a.shouldRetry(attempts, err)
			if retryErr != nil {
				return nil, retryErr
			}
			if retry {
				logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
				select {
				case <-ctx.Done():
					return nil, ctx.Err()
				case <-time.After(time.Duration(after) * time.Millisecond):
					continue
				}
			}
			return nil, err
		}

		content := ""
		for _, block := range anthropicResponse.Content {
			if text, ok := block.AsAny().(anthropic.TextBlock); ok {
				content += text.Text
			}
		}

		return &ProviderResponse{
			Content:   content,
			ToolCalls: a.toolCalls(*anthropicResponse),
			Usage:     a.usage(*anthropicResponse),
		}, nil
	}
}

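// stream issues a streaming Messages request and translates SDK events into
// ProviderEvents on the returned channel. The channel is closed once the
// stream completes, fails with a non-retryable error, or the context is
// cancelled.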
func (a *anthropicClient) stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
	preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
	cfg := config.Get()
	if cfg.Options.Debug {
		jsonData, _ := json.Marshal(preparedMessages)
		logging.Debug("Prepared messages", "messages", string(jsonData))
	}
	attempts := 0
	eventChan := make(chan ProviderEvent)
	go func() {
		for {
			attempts++
			anthropicStream := a.client.Messages.NewStreaming(
				ctx,
				preparedMessages,
			)
			accumulatedMessage := anthropic.Message{}

			currentToolCallID := ""
			for anthropicStream.Next() {
				event := anthropicStream.Current()
				err := accumulatedMessage.Accumulate(event)
				if err != nil {
					logging.Warn("Error accumulating message", "error", err)
					continue
				}

				switch event := event.AsAny().(type) {
				case anthropic.ContentBlockStartEvent:
					switch event.ContentBlock.Type {
					case "text":
						eventChan <- ProviderEvent{Type: EventContentStart}
					case "tool_use":
						currentToolCallID = event.ContentBlock.ID
						eventChan <- ProviderEvent{
							Type: EventToolUseStart,
							ToolCall: &message.ToolCall{
								ID:       event.ContentBlock.ID,
								Name:     event.ContentBlock.Name,
								Finished: false,
							},
						}
					}

				case anthropic.ContentBlockDeltaEvent:
					if event.Delta.Type == "thinking_delta" && event.Delta.Thinking != "" {
						eventChan <- ProviderEvent{
							Type:     EventThinkingDelta,
							Thinking: event.Delta.Thinking,
						}
					} else if event.Delta.Type == "text_delta" && event.Delta.Text != "" {
						eventChan <- ProviderEvent{
							Type:    EventContentDelta,
							Content: event.Delta.Text,
						}
					} else if event.Delta.Type == "input_json_delta" {
						if currentToolCallID != "" {
							eventChan <- ProviderEvent{
								Type: EventToolUseDelta,
								ToolCall: &message.ToolCall{
									ID:       currentToolCallID,
									Finished: false,
									Input:    event.Delta.PartialJSON,
								},
							}
						}
					}
				case anthropic.ContentBlockStopEvent:
					if currentToolCallID != "" {
						eventChan <- ProviderEvent{
							Type: EventToolUseStop,
							ToolCall: &message.ToolCall{
								ID: currentToolCallID,
							},
						}
						currentToolCallID = ""
					} else {
						eventChan <- ProviderEvent{Type: EventContentStop}
					}

				case anthropic.MessageStopEvent:
					content := ""
					for _, block := range accumulatedMessage.Content {
						if text, ok := block.AsAny().(anthropic.TextBlock); ok {
							content += text.Text
						}
					}

					eventChan <- ProviderEvent{
						Type: EventComplete,
						Response: &ProviderResponse{
							Content:      content,
							ToolCalls:    a.toolCalls(accumulatedMessage),
							Usage:        a.usage(accumulatedMessage),
							FinishReason: a.finishReason(string(accumulatedMessage.StopReason)),
						},
						Content: content,
					}
				}
			}

			err := anthropicStream.Err()
			if err == nil || errors.Is(err, io.EOF) {
				close(eventChan)
				return
			}
			// On error, check whether the stream can be retried.
			retry, after, retryErr := a.shouldRetry(attempts, err)
			if retryErr != nil {
				eventChan <- ProviderEvent{Type: EventError, Error: retryErr}
				close(eventChan)
				return
			}
			if retry {
				logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
				select {
				case <-ctx.Done():
					// The context was cancelled while waiting to retry.
					eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
					close(eventChan)
					return
				case <-time.After(time.Duration(after) * time.Millisecond):
					continue
				}
			}
			if ctx.Err() != nil {
				eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
			}

			close(eventChan)
			return
		}
	}()
	return eventChan
}

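// shouldRetry reports whether an API error is retryable and how long to wait
// (in milliseconds). A 401 refreshes the API key, rebuilds the client, and
// retries immediately; 429 and 529 retry with exponential backoff plus jitter,
// honoring a Retry-After header (seconds) when present, up to maxRetries
// attempts; other errors are returned to the caller unchanged.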
func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, error) {
	var apiErr *anthropic.Error
	if !errors.As(err, &apiErr) {
		return false, 0, err
	}

	if apiErr.StatusCode == 401 {
		a.providerOptions.apiKey, err = config.ResolveAPIKey(a.providerOptions.config.APIKey)
		if err != nil {
			return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
		}
		a.client = createAnthropicClient(a.providerOptions, a.useBedrock)
		return true, 0, nil
	}

	if apiErr.StatusCode != 429 && apiErr.StatusCode != 529 {
		return false, 0, err
	}

	if attempts > maxRetries {
		return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
	}

	retryMs := 0
	retryAfterValues := apiErr.Response.Header.Values("Retry-After")

	backoffMs := 2000 * (1 << (attempts - 1))
	jitterMs := int(float64(backoffMs) * 0.2)
	retryMs = backoffMs + jitterMs
	if len(retryAfterValues) > 0 {
		if _, err := fmt.Sscanf(retryAfterValues[0], "%d", &retryMs); err == nil {
			retryMs = retryMs * 1000
		}
	}
	return true, int64(retryMs), nil
}

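// toolCalls extracts tool_use blocks from a completed Anthropic message as
// internal tool calls.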
func (a *anthropicClient) toolCalls(msg anthropic.Message) []message.ToolCall {
	var toolCalls []message.ToolCall

	for _, block := range msg.Content {
		switch variant := block.AsAny().(type) {
		case anthropic.ToolUseBlock:
			toolCall := message.ToolCall{
				ID:       variant.ID,
				Name:     variant.Name,
				Input:    string(variant.Input),
				Type:     string(variant.Type),
				Finished: true,
			}
			toolCalls = append(toolCalls, toolCall)
		}
	}

	return toolCalls
}

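// usage converts the response usage block into a TokenUsage, including
// prompt-cache creation and read token counts.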
func (a *anthropicClient) usage(msg anthropic.Message) TokenUsage {
	return TokenUsage{
		InputTokens:         msg.Usage.InputTokens,
		OutputTokens:        msg.Usage.OutputTokens,
		CacheCreationTokens: msg.Usage.CacheCreationInputTokens,
		CacheReadTokens:     msg.Usage.CacheReadInputTokens,
	}
}

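// Model returns the model resolved for the configured model type.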
func (a *anthropicClient) Model() config.Model {
	return a.providerOptions.model(a.providerOptions.modelType)
}

// DefaultShouldThinkFn reports whether extended thinking should be enabled,
// based on the prompt mentioning "think".
// TODO: check whether this is still needed.
func DefaultShouldThinkFn(s string) bool {
	return strings.Contains(strings.ToLower(s), "think")
}