// anthropic.go — Anthropic provider implementation for the ai package.

  1package providers
  2
  3import (
  4	"context"
  5	"encoding/base64"
  6	"encoding/json"
  7	"errors"
  8	"fmt"
  9	"io"
 10	"maps"
 11	"strings"
 12
 13	"github.com/anthropics/anthropic-sdk-go"
 14	"github.com/anthropics/anthropic-sdk-go/option"
 15	"github.com/anthropics/anthropic-sdk-go/packages/param"
 16	"github.com/charmbracelet/ai"
 17)
 18
// AnthropicProviderOptions are the provider-specific options read from the
// "anthropic" key of a call's ProviderOptions.
type AnthropicProviderOptions struct {
	// SendReasoning controls whether previously generated reasoning content
	// is forwarded back to the API; nil is treated as enabled.
	SendReasoning *bool `json:"send_reasoning,omitempty"`
	// Thinking enables extended thinking with the configured token budget.
	Thinking *AnthropicThinkingProviderOption `json:"thinking,omitempty"`
	// DisableParallelToolUse asks the model to call at most one tool per turn.
	DisableParallelToolUse *bool `json:"disable_parallel_tool_use,omitempty"`
}
 24
// AnthropicThinkingProviderOption configures extended thinking.
type AnthropicThinkingProviderOption struct {
	// BudgetTokens is the maximum number of tokens the model may spend on
	// thinking; it is added on top of MaxTokens when thinking is enabled.
	BudgetTokens int64 `json:"budget_tokens"`
}
 28
// AnthropicReasoningMetadata carries the signature (for thinking blocks) or
// redacted data (for redacted thinking blocks) attached to reasoning parts so
// they can be replayed to the API on subsequent calls.
type AnthropicReasoningMetadata struct {
	Signature    string `json:"signature"`
	RedactedData string `json:"redacted_data"`
}
 33
// AnthropicCacheControlProviderOptions marks a content block for prompt
// caching; Type is the cache-control type (e.g. "ephemeral").
type AnthropicCacheControlProviderOptions struct {
	Type string `json:"type"`
}
// AnthropicFilePartProviderOptions are per-file-part options (citations,
// document title and context). NOTE(review): currently declared but not read
// anywhere in this file.
type AnthropicFilePartProviderOptions struct {
	EnableCitations bool   `json:"enable_citations"`
	Title           string `json:"title"`
	Context         string `json:"context"`
}
 42
// anthropicProviderOptions is the internal configuration assembled by the
// functional options passed to NewAnthropicProvider.
type anthropicProviderOptions struct {
	baseURL string            // API endpoint; defaults to https://api.anthropic.com
	apiKey  string            // API key; empty means the SDK's default resolution
	name    string            // provider name used in the "<name>.messages" identifier
	headers map[string]string // extra headers added to every request
	client  option.HTTPClient // optional custom HTTP client
}
 50
// anthropicProvider implements ai.Provider for the Anthropic Messages API.
type anthropicProvider struct {
	options anthropicProviderOptions
}

// AnthropicOption is a functional option for NewAnthropicProvider.
type AnthropicOption = func(*anthropicProviderOptions)
 56
 57func NewAnthropicProvider(opts ...AnthropicOption) ai.Provider {
 58	options := anthropicProviderOptions{
 59		headers: map[string]string{},
 60	}
 61	for _, o := range opts {
 62		o(&options)
 63	}
 64	if options.baseURL == "" {
 65		options.baseURL = "https://api.anthropic.com"
 66	}
 67
 68	if options.name == "" {
 69		options.name = "anthropic"
 70	}
 71
 72	return &anthropicProvider{
 73		options: options,
 74	}
 75}
 76
 77func WithAnthropicBaseURL(baseURL string) AnthropicOption {
 78	return func(o *anthropicProviderOptions) {
 79		o.baseURL = baseURL
 80	}
 81}
 82
 83func WithAnthropicAPIKey(apiKey string) AnthropicOption {
 84	return func(o *anthropicProviderOptions) {
 85		o.apiKey = apiKey
 86	}
 87}
 88
 89func WithAnthropicName(name string) AnthropicOption {
 90	return func(o *anthropicProviderOptions) {
 91		o.name = name
 92	}
 93}
 94
 95func WithAnthropicHeaders(headers map[string]string) AnthropicOption {
 96	return func(o *anthropicProviderOptions) {
 97		maps.Copy(o.headers, headers)
 98	}
 99}
100
101func WithAnthropicHTTPClient(client option.HTTPClient) AnthropicOption {
102	return func(o *anthropicProviderOptions) {
103		o.client = client
104	}
105}
106
107func (a *anthropicProvider) LanguageModel(modelID string) (ai.LanguageModel, error) {
108	anthropicClientOptions := []option.RequestOption{}
109	if a.options.apiKey != "" {
110		anthropicClientOptions = append(anthropicClientOptions, option.WithAPIKey(a.options.apiKey))
111	}
112	if a.options.baseURL != "" {
113		anthropicClientOptions = append(anthropicClientOptions, option.WithBaseURL(a.options.baseURL))
114	}
115
116	for key, value := range a.options.headers {
117		anthropicClientOptions = append(anthropicClientOptions, option.WithHeader(key, value))
118	}
119
120	if a.options.client != nil {
121		anthropicClientOptions = append(anthropicClientOptions, option.WithHTTPClient(a.options.client))
122	}
123	return anthropicLanguageModel{
124		modelID:         modelID,
125		provider:        fmt.Sprintf("%s.messages", a.options.name),
126		providerOptions: a.options,
127		client:          anthropic.NewClient(anthropicClientOptions...),
128	}, nil
129}
130
// anthropicLanguageModel implements ai.LanguageModel on top of the Anthropic
// Messages API.
type anthropicLanguageModel struct {
	provider        string // provider identifier, e.g. "anthropic.messages"
	modelID         string // Anthropic model ID, e.g. "claude-3-5-sonnet-..."
	client          anthropic.Client
	providerOptions anthropicProviderOptions
}
137
138// Model implements ai.LanguageModel.
139func (a anthropicLanguageModel) Model() string {
140	return a.modelID
141}
142
143// Provider implements ai.LanguageModel.
144func (a anthropicLanguageModel) Provider() string {
145	return a.provider
146}
147
// prepareParams translates a provider-agnostic ai.Call into Anthropic
// MessageNewParams, returning warnings for settings the API cannot honor.
func (a anthropicLanguageModel) prepareParams(call ai.Call) (*anthropic.MessageNewParams, []ai.CallWarning, error) {
	params := &anthropic.MessageNewParams{}
	providerOptions := &AnthropicProviderOptions{}
	if v, ok := call.ProviderOptions["anthropic"]; ok {
		err := ai.ParseOptions(v, providerOptions)
		if err != nil {
			return nil, nil, err
		}
	}
	// Reasoning content is forwarded unless explicitly disabled.
	sendReasoning := true
	if providerOptions.SendReasoning != nil {
		sendReasoning = *providerOptions.SendReasoning
	}
	systemBlocks, messages, warnings := toAnthropicPrompt(call.Prompt, sendReasoning)

	// The Messages API has no frequency/presence penalty settings.
	if call.FrequencyPenalty != nil {
		warnings = append(warnings, ai.CallWarning{
			Type:    ai.CallWarningTypeUnsupportedSetting,
			Setting: "FrequencyPenalty",
		})
	}
	if call.PresencePenalty != nil {
		warnings = append(warnings, ai.CallWarning{
			Type:    ai.CallWarningTypeUnsupportedSetting,
			Setting: "PresencePenalty",
		})
	}

	params.System = systemBlocks
	params.Messages = messages
	params.Model = anthropic.Model(a.modelID)
	// MaxTokens is required by the API; default to 4096 when not specified.
	params.MaxTokens = 4096

	if call.MaxOutputTokens != nil {
		params.MaxTokens = *call.MaxOutputTokens
	}

	if call.Temperature != nil {
		params.Temperature = param.NewOpt(*call.Temperature)
	}
	if call.TopK != nil {
		params.TopK = param.NewOpt(*call.TopK)
	}
	if call.TopP != nil {
		params.TopP = param.NewOpt(*call.TopP)
	}

	isThinking := false
	var thinkingBudget int64
	if providerOptions.Thinking != nil {
		isThinking = true
		thinkingBudget = providerOptions.Thinking.BudgetTokens
	}
	if isThinking {
		if thinkingBudget == 0 {
			return nil, nil, ai.NewUnsupportedFunctionalityError("thinking requires budget", "")
		}
		params.Thinking = anthropic.ThinkingConfigParamOfEnabled(thinkingBudget)
		// Anthropic rejects temperature/top_p/top_k when thinking is enabled,
		// so clear them and warn instead of failing the request.
		if call.Temperature != nil {
			params.Temperature = param.Opt[float64]{}
			warnings = append(warnings, ai.CallWarning{
				Type:    ai.CallWarningTypeUnsupportedSetting,
				Setting: "temperature",
				Details: "temperature is not supported when thinking is enabled",
			})
		}
		if call.TopP != nil {
			params.TopP = param.Opt[float64]{}
			warnings = append(warnings, ai.CallWarning{
				Type:    ai.CallWarningTypeUnsupportedSetting,
				Setting: "TopP",
				Details: "TopP is not supported when thinking is enabled",
			})
		}
		if call.TopK != nil {
			params.TopK = param.Opt[int64]{}
			warnings = append(warnings, ai.CallWarning{
				Type:    ai.CallWarningTypeUnsupportedSetting,
				Setting: "TopK",
				Details: "TopK is not supported when thinking is enabled",
			})
		}
		// The thinking budget counts against max_tokens, so extend it.
		params.MaxTokens = params.MaxTokens + thinkingBudget
	}

	if len(call.Tools) > 0 {
		disableParallelToolUse := false
		if providerOptions.DisableParallelToolUse != nil {
			disableParallelToolUse = *providerOptions.DisableParallelToolUse
		}
		tools, toolChoice, toolWarnings := toAnthropicTools(call.Tools, call.ToolChoice, disableParallelToolUse)
		params.Tools = tools
		if toolChoice != nil {
			params.ToolChoice = *toolChoice
		}
		warnings = append(warnings, toolWarnings...)
	}

	return params, warnings, nil
}
248
249func getCacheControl(providerOptions ai.ProviderOptions) *AnthropicCacheControlProviderOptions {
250	if anthropicOptions, ok := providerOptions["anthropic"]; ok {
251		if cacheControl, ok := anthropicOptions["cache_control"]; ok {
252			if cc, ok := cacheControl.(map[string]any); ok {
253				cacheControlOption := &AnthropicCacheControlProviderOptions{}
254				err := ai.ParseOptions(cc, cacheControlOption)
255				if err != nil {
256					return cacheControlOption
257				}
258			}
259		} else if cacheControl, ok := anthropicOptions["cacheControl"]; ok {
260			if cc, ok := cacheControl.(map[string]any); ok {
261				cacheControlOption := &AnthropicCacheControlProviderOptions{}
262				err := ai.ParseOptions(cc, cacheControlOption)
263				if err != nil {
264					return cacheControlOption
265				}
266			}
267		}
268	}
269	return nil
270}
271
272func getReasoningMetadata(providerOptions ai.ProviderOptions) *AnthropicReasoningMetadata {
273	if anthropicOptions, ok := providerOptions["anthropic"]; ok {
274		reasoningMetadata := &AnthropicReasoningMetadata{}
275		err := ai.ParseOptions(anthropicOptions, reasoningMetadata)
276		if err != nil {
277			return reasoningMetadata
278		}
279	}
280	return nil
281}
282
// messageBlock is a run of consecutive prompt messages that share an
// effective role, used to merge them into a single Anthropic message.
type messageBlock struct {
	Role     ai.MessageRole
	Messages []ai.Message
}
287
288func groupIntoBlocks(prompt ai.Prompt) []*messageBlock {
289	var blocks []*messageBlock
290
291	var currentBlock *messageBlock
292
293	for _, msg := range prompt {
294		switch msg.Role {
295		case ai.MessageRoleSystem:
296			if currentBlock == nil || currentBlock.Role != ai.MessageRoleSystem {
297				currentBlock = &messageBlock{
298					Role:     ai.MessageRoleSystem,
299					Messages: []ai.Message{},
300				}
301				blocks = append(blocks, currentBlock)
302			}
303			currentBlock.Messages = append(currentBlock.Messages, msg)
304		case ai.MessageRoleUser:
305			if currentBlock == nil || currentBlock.Role != ai.MessageRoleUser {
306				currentBlock = &messageBlock{
307					Role:     ai.MessageRoleUser,
308					Messages: []ai.Message{},
309				}
310				blocks = append(blocks, currentBlock)
311			}
312			currentBlock.Messages = append(currentBlock.Messages, msg)
313		case ai.MessageRoleAssistant:
314			if currentBlock == nil || currentBlock.Role != ai.MessageRoleAssistant {
315				currentBlock = &messageBlock{
316					Role:     ai.MessageRoleAssistant,
317					Messages: []ai.Message{},
318				}
319				blocks = append(blocks, currentBlock)
320			}
321			currentBlock.Messages = append(currentBlock.Messages, msg)
322		case ai.MessageRoleTool:
323			if currentBlock == nil || currentBlock.Role != ai.MessageRoleUser {
324				currentBlock = &messageBlock{
325					Role:     ai.MessageRoleUser,
326					Messages: []ai.Message{},
327				}
328				blocks = append(blocks, currentBlock)
329			}
330			currentBlock.Messages = append(currentBlock.Messages, msg)
331		}
332	}
333	return blocks
334}
335
336func toAnthropicTools(tools []ai.Tool, toolChoice *ai.ToolChoice, disableParallelToolCalls bool) (anthropicTools []anthropic.ToolUnionParam, anthropicToolChoice *anthropic.ToolChoiceUnionParam, warnings []ai.CallWarning) {
337	for _, tool := range tools {
338		if tool.GetType() == ai.ToolTypeFunction {
339			ft, ok := tool.(ai.FunctionTool)
340			if !ok {
341				continue
342			}
343			required := []string{}
344			var properties any
345			if props, ok := ft.InputSchema["properties"]; ok {
346				properties = props
347			}
348			if req, ok := ft.InputSchema["required"]; ok {
349				if reqArr, ok := req.([]string); ok {
350					required = reqArr
351				}
352			}
353			cacheControl := getCacheControl(ft.ProviderOptions)
354
355			anthropicTool := anthropic.ToolParam{
356				Name:        ft.Name,
357				Description: anthropic.String(ft.Description),
358				InputSchema: anthropic.ToolInputSchemaParam{
359					Properties: properties,
360					Required:   required,
361				},
362			}
363			if cacheControl != nil {
364				anthropicTool.CacheControl = anthropic.NewCacheControlEphemeralParam()
365			}
366			anthropicTools = append(anthropicTools, anthropic.ToolUnionParam{OfTool: &anthropicTool})
367			continue
368		}
369		// TODO: handle provider tool calls
370		warnings = append(warnings, ai.CallWarning{
371			Type:    ai.CallWarningTypeUnsupportedTool,
372			Tool:    tool,
373			Message: "tool is not supported",
374		})
375	}
376	if toolChoice == nil {
377		if disableParallelToolCalls {
378			anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
379				OfAuto: &anthropic.ToolChoiceAutoParam{
380					Type:                   "auto",
381					DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
382				},
383			}
384		}
385		return anthropicTools, anthropicToolChoice, warnings
386	}
387
388	switch *toolChoice {
389	case ai.ToolChoiceAuto:
390		anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
391			OfAuto: &anthropic.ToolChoiceAutoParam{
392				Type:                   "auto",
393				DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
394			},
395		}
396	case ai.ToolChoiceRequired:
397		anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
398			OfAny: &anthropic.ToolChoiceAnyParam{
399				Type:                   "any",
400				DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
401			},
402		}
403	case ai.ToolChoiceNone:
404		return anthropicTools, anthropicToolChoice, warnings
405	default:
406		anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
407			OfTool: &anthropic.ToolChoiceToolParam{
408				Type:                   "tool",
409				Name:                   string(*toolChoice),
410				DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
411			},
412		}
413	}
414	return anthropicTools, anthropicToolChoice, warnings
415}
416
// toAnthropicPrompt converts a generic prompt into Anthropic system text
// blocks and user/assistant messages. sendReasoningData controls whether
// assistant reasoning parts are replayed to the API. Returns warnings for
// content that had to be skipped.
func toAnthropicPrompt(prompt ai.Prompt, sendReasoningData bool) ([]anthropic.TextBlockParam, []anthropic.MessageParam, []ai.CallWarning) {
	var systemBlocks []anthropic.TextBlockParam
	var messages []anthropic.MessageParam
	var warnings []ai.CallWarning

	blocks := groupIntoBlocks(prompt)
	finishedSystemBlock := false
	for _, block := range blocks {
		switch block.Role {
		case ai.MessageRoleSystem:
			if finishedSystemBlock {
				// skip multiple system messages that are separated by user/assistant messages
				// TODO: see if we need to send error here?
				continue
			}
			finishedSystemBlock = true
			// Only text parts are representable as system blocks.
			for _, msg := range block.Messages {
				for _, part := range msg.Content {
					cacheControl := getCacheControl(part.Options())
					text, ok := ai.AsMessagePart[ai.TextPart](part)
					if !ok {
						continue
					}
					textBlock := anthropic.TextBlockParam{
						Text: text.Text,
					}
					if cacheControl != nil {
						textBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
					}
					systemBlocks = append(systemBlocks, textBlock)
				}
			}

		case ai.MessageRoleUser:
			// User blocks can contain both user messages and tool results
			// (groupIntoBlocks folds tool messages into user blocks).
			var anthropicContent []anthropic.ContentBlockParamUnion
			for _, msg := range block.Messages {
				if msg.Role == ai.MessageRoleUser {
					for i, part := range msg.Content {
						// Message-level cache control applies to the last part
						// unless the part carries its own.
						isLastPart := i == len(msg.Content)-1
						cacheControl := getCacheControl(part.Options())
						if cacheControl == nil && isLastPart {
							cacheControl = getCacheControl(msg.ProviderOptions)
						}
						switch part.GetType() {
						case ai.ContentTypeText:
							text, ok := ai.AsMessagePart[ai.TextPart](part)
							if !ok {
								continue
							}
							textBlock := &anthropic.TextBlockParam{
								Text: text.Text,
							}
							if cacheControl != nil {
								textBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
							}
							anthropicContent = append(anthropicContent, anthropic.ContentBlockParamUnion{
								OfText: textBlock,
							})
						case ai.ContentTypeFile:
							file, ok := ai.AsMessagePart[ai.FilePart](part)
							if !ok {
								continue
							}
							// TODO: handle other file types
							if !strings.HasPrefix(file.MediaType, "image/") {
								continue
							}

							base64Encoded := base64.StdEncoding.EncodeToString(file.Data)
							imageBlock := anthropic.NewImageBlockBase64(file.MediaType, base64Encoded)
							if cacheControl != nil {
								imageBlock.OfImage.CacheControl = anthropic.NewCacheControlEphemeralParam()
							}
							anthropicContent = append(anthropicContent, imageBlock)
						}
					}
				} else if msg.Role == ai.MessageRoleTool {
					for i, part := range msg.Content {
						isLastPart := i == len(msg.Content)-1
						cacheControl := getCacheControl(part.Options())
						if cacheControl == nil && isLastPart {
							cacheControl = getCacheControl(msg.ProviderOptions)
						}
						result, ok := ai.AsMessagePart[ai.ToolResultPart](part)
						if !ok {
							continue
						}
						toolResultBlock := anthropic.ToolResultBlockParam{
							ToolUseID: result.ToolCallID,
						}
						// Map the tool output variant to the matching
						// Anthropic content union.
						switch result.Output.GetType() {
						case ai.ToolResultContentTypeText:
							content, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentText](result.Output)
							if !ok {
								continue
							}
							toolResultBlock.Content = []anthropic.ToolResultBlockParamContentUnion{
								{
									OfText: &anthropic.TextBlockParam{
										Text: content.Text,
									},
								},
							}
						case ai.ToolResultContentTypeMedia:
							content, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentMedia](result.Output)
							if !ok {
								continue
							}
							toolResultBlock.Content = []anthropic.ToolResultBlockParamContentUnion{
								{
									OfImage: anthropic.NewImageBlockBase64(content.MediaType, content.Data).OfImage,
								},
							}
						case ai.ToolResultContentTypeError:
							content, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentError](result.Output)
							if !ok {
								continue
							}
							toolResultBlock.Content = []anthropic.ToolResultBlockParamContentUnion{
								{
									OfText: &anthropic.TextBlockParam{
										Text: content.Error.Error(),
									},
								},
							}
							toolResultBlock.IsError = param.NewOpt(true)
						}
						if cacheControl != nil {
							toolResultBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
						}
						anthropicContent = append(anthropicContent, anthropic.ContentBlockParamUnion{
							OfToolResult: &toolResultBlock,
						})
					}
				}
			}
			messages = append(messages, anthropic.NewUserMessage(anthropicContent...))
		case ai.MessageRoleAssistant:
			var anthropicContent []anthropic.ContentBlockParamUnion
			for _, msg := range block.Messages {
				for i, part := range msg.Content {
					isLastPart := i == len(msg.Content)-1
					cacheControl := getCacheControl(part.Options())
					if cacheControl == nil && isLastPart {
						cacheControl = getCacheControl(msg.ProviderOptions)
					}
					switch part.GetType() {
					case ai.ContentTypeText:
						text, ok := ai.AsMessagePart[ai.TextPart](part)
						if !ok {
							continue
						}
						textBlock := &anthropic.TextBlockParam{
							Text: text.Text,
						}
						if cacheControl != nil {
							textBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
						}
						anthropicContent = append(anthropicContent, anthropic.ContentBlockParamUnion{
							OfText: textBlock,
						})
					case ai.ContentTypeReasoning:
						reasoning, ok := ai.AsMessagePart[ai.ReasoningPart](part)
						if !ok {
							continue
						}
						if !sendReasoningData {
							warnings = append(warnings, ai.CallWarning{
								Type:    "other",
								Message: "sending reasoning content is disabled for this model",
							})
							continue
						}
						reasoningMetadata := getReasoningMetadata(part.Options())
						if reasoningMetadata == nil {
							warnings = append(warnings, ai.CallWarning{
								Type:    "other",
								Message: "unsupported reasoning metadata",
							})
							continue
						}

						// A signature marks a regular thinking block; redacted
						// data marks a redacted thinking block.
						if reasoningMetadata.Signature != "" {
							anthropicContent = append(anthropicContent, anthropic.NewThinkingBlock(reasoningMetadata.Signature, reasoning.Text))
						} else if reasoningMetadata.RedactedData != "" {
							anthropicContent = append(anthropicContent, anthropic.NewRedactedThinkingBlock(reasoningMetadata.RedactedData))
						} else {
							warnings = append(warnings, ai.CallWarning{
								Type:    "other",
								Message: "unsupported reasoning metadata",
							})
							continue
						}
					case ai.ContentTypeToolCall:
						toolCall, ok := ai.AsMessagePart[ai.ToolCallPart](part)
						if !ok {
							continue
						}
						if toolCall.ProviderExecuted {
							// TODO: implement provider executed call
							continue
						}

						// Tool input is stored as a JSON string; re-decode it
						// into a map for the SDK. Unparsable input is dropped.
						var inputMap map[string]any
						err := json.Unmarshal([]byte(toolCall.Input), &inputMap)
						if err != nil {
							continue
						}
						toolUseBlock := anthropic.NewToolUseBlock(toolCall.ToolCallID, inputMap, toolCall.ToolName)
						if cacheControl != nil {
							toolUseBlock.OfToolUse.CacheControl = anthropic.NewCacheControlEphemeralParam()
						}
						anthropicContent = append(anthropicContent, toolUseBlock)
					case ai.ContentTypeToolResult:
						// TODO: implement provider executed tool result
					}
				}
			}
			messages = append(messages, anthropic.NewAssistantMessage(anthropicContent...))
		}
	}
	return systemBlocks, messages, warnings
}
640
641func (o anthropicLanguageModel) handleError(err error) error {
642	var apiErr *anthropic.Error
643	if errors.As(err, &apiErr) {
644		requestDump := apiErr.DumpRequest(true)
645		responseDump := apiErr.DumpResponse(true)
646		headers := map[string]string{}
647		for k, h := range apiErr.Response.Header {
648			v := h[len(h)-1]
649			headers[strings.ToLower(k)] = v
650		}
651		return ai.NewAPICallError(
652			apiErr.Error(),
653			apiErr.Request.URL.String(),
654			string(requestDump),
655			apiErr.StatusCode,
656			headers,
657			string(responseDump),
658			apiErr,
659			false,
660		)
661	}
662	return err
663}
664
665func mapAnthropicFinishReason(finishReason string) ai.FinishReason {
666	switch finishReason {
667	case "end", "stop_sequence":
668		return ai.FinishReasonStop
669	case "max_tokens":
670		return ai.FinishReasonLength
671	case "tool_use":
672		return ai.FinishReasonToolCalls
673	default:
674		return ai.FinishReasonUnknown
675	}
676}
677
// Generate implements ai.LanguageModel. It performs a single non-streaming
// Messages API call and maps the response content, usage, and stop reason
// into the generic ai.Response shape.
func (a anthropicLanguageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response, error) {
	params, warnings, err := a.prepareParams(call)
	if err != nil {
		return nil, err
	}
	response, err := a.client.Messages.New(ctx, *params)
	if err != nil {
		return nil, a.handleError(err)
	}

	// Translate each response content block into the generic content type.
	var content []ai.Content
	for _, block := range response.Content {
		switch block.Type {
		case "text":
			text, ok := block.AsAny().(anthropic.TextBlock)
			if !ok {
				continue
			}
			content = append(content, ai.TextContent{
				Text: text.Text,
			})
		case "thinking":
			reasoning, ok := block.AsAny().(anthropic.ThinkingBlock)
			if !ok {
				continue
			}
			// The signature is preserved in provider metadata so the
			// reasoning block can be replayed on later calls.
			content = append(content, ai.ReasoningContent{
				Text: reasoning.Thinking,
				ProviderMetadata: map[string]map[string]any{
					"anthropic": {
						"signature": reasoning.Signature,
					},
				},
			})
		case "redacted_thinking":
			reasoning, ok := block.AsAny().(anthropic.RedactedThinkingBlock)
			if !ok {
				continue
			}
			// Redacted thinking has no visible text; only the opaque data.
			content = append(content, ai.ReasoningContent{
				Text: "",
				ProviderMetadata: map[string]map[string]any{
					"anthropic": {
						"redacted_data": reasoning.Data,
					},
				},
			})
		case "tool_use":
			toolUse, ok := block.AsAny().(anthropic.ToolUseBlock)
			if !ok {
				continue
			}
			content = append(content, ai.ToolCallContent{
				ToolCallID:       toolUse.ID,
				ToolName:         toolUse.Name,
				Input:            string(toolUse.Input),
				ProviderExecuted: false,
			})
		}
	}

	return &ai.Response{
		Content: content,
		Usage: ai.Usage{
			InputTokens:         response.Usage.InputTokens,
			OutputTokens:        response.Usage.OutputTokens,
			TotalTokens:         response.Usage.InputTokens + response.Usage.OutputTokens,
			CacheCreationTokens: response.Usage.CacheCreationInputTokens,
			CacheReadTokens:     response.Usage.CacheReadInputTokens,
		},
		FinishReason: mapAnthropicFinishReason(string(response.StopReason)),
		ProviderMetadata: ai.ProviderMetadata{
			"anthropic": make(map[string]any),
		},
		Warnings: warnings,
	}, nil
}
756
// Stream implements ai.LanguageModel. It opens a streaming Messages API call
// and yields ai.StreamPart values as SSE events arrive, accumulating the full
// message so final tool inputs, usage, and the stop reason can be emitted at
// the end.
func (a anthropicLanguageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamResponse, error) {
	params, warnings, err := a.prepareParams(call)
	if err != nil {
		return nil, err
	}

	stream := a.client.Messages.NewStreaming(ctx, *params)
	// acc accumulates all chunks into a complete message.
	acc := anthropic.Message{}
	return func(yield func(ai.StreamPart) bool) {
		// Surface any preparation warnings before the first content part.
		if len(warnings) > 0 {
			if !yield(ai.StreamPart{
				Type:     ai.StreamPartTypeWarnings,
				Warnings: warnings,
			}) {
				return
			}
		}

		for stream.Next() {
			chunk := stream.Current()
			acc.Accumulate(chunk)
			switch chunk.Type {
			case "content_block_start":
				// Content parts are keyed by the block index; tool-use parts
				// use the tool-use ID instead.
				contentBlockType := chunk.ContentBlock.Type
				switch contentBlockType {
				case "text":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeTextStart,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "thinking":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningStart,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "redacted_thinking":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningStart,
						ID:   fmt.Sprintf("%d", chunk.Index),
						ProviderMetadata: ai.ProviderMetadata{
							"anthropic": {
								"redacted_data": chunk.ContentBlock.Data,
							},
						},
					}) {
						return
					}
				case "tool_use":
					if !yield(ai.StreamPart{
						Type:          ai.StreamPartTypeToolInputStart,
						ID:            chunk.ContentBlock.ID,
						ToolCallName:  chunk.ContentBlock.Name,
						ToolCallInput: "",
					}) {
						return
					}
				}
			case "content_block_stop":
				// Guard: the accumulator may not yet have this index.
				if len(acc.Content)-1 < int(chunk.Index) {
					continue
				}
				contentBlock := acc.Content[int(chunk.Index)]
				switch contentBlock.Type {
				case "text":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeTextEnd,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "thinking":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningEnd,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "tool_use":
					// Close the input stream, then emit the completed tool
					// call with the fully accumulated JSON input.
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeToolInputEnd,
						ID:   contentBlock.ID,
					}) {
						return
					}
					if !yield(ai.StreamPart{
						Type:          ai.StreamPartTypeToolCall,
						ID:            contentBlock.ID,
						ToolCallName:  contentBlock.Name,
						ToolCallInput: string(contentBlock.Input),
					}) {
						return
					}
				}
			case "content_block_delta":
				switch chunk.Delta.Type {
				case "text_delta":
					if !yield(ai.StreamPart{
						Type:  ai.StreamPartTypeTextDelta,
						ID:    fmt.Sprintf("%d", chunk.Index),
						Delta: chunk.Delta.Text,
					}) {
						return
					}
				case "thinking_delta":
					if !yield(ai.StreamPart{
						Type:  ai.StreamPartTypeReasoningDelta,
						ID:    fmt.Sprintf("%d", chunk.Index),
						Delta: chunk.Delta.Text,
					}) {
						return
					}
				case "signature_delta":
					// The signature arrives as a delta with no text; pass it
					// through provider metadata.
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningDelta,
						ID:   fmt.Sprintf("%d", chunk.Index),
						ProviderMetadata: ai.ProviderMetadata{
							"anthropic": {
								"signature": chunk.Delta.Signature,
							},
						},
					}) {
						return
					}
				case "input_json_delta":
					if len(acc.Content)-1 < int(chunk.Index) {
						continue
					}
					contentBlock := acc.Content[int(chunk.Index)]
					if !yield(ai.StreamPart{
						Type:          ai.StreamPartTypeToolInputDelta,
						ID:            contentBlock.ID,
						ToolCallInput: chunk.Delta.PartialJSON,
					}) {
						return
					}
				}
			case "message_stop":
			}
		}

		// Emit the finish part (or an error) once the stream is exhausted.
		err := stream.Err()
		if err == nil || errors.Is(err, io.EOF) {
			yield(ai.StreamPart{
				Type:         ai.StreamPartTypeFinish,
				ID:           acc.ID,
				FinishReason: mapAnthropicFinishReason(string(acc.StopReason)),
				Usage: ai.Usage{
					InputTokens:         acc.Usage.InputTokens,
					OutputTokens:        acc.Usage.OutputTokens,
					TotalTokens:         acc.Usage.InputTokens + acc.Usage.OutputTokens,
					CacheCreationTokens: acc.Usage.CacheCreationInputTokens,
					CacheReadTokens:     acc.Usage.CacheReadInputTokens,
				},
				ProviderMetadata: ai.ProviderMetadata{
					"anthropic": make(map[string]any),
				},
			})
			return
		} else {
			yield(ai.StreamPart{
				Type:  ai.StreamPartTypeError,
				Error: a.handleError(err),
			})
			return
		}
	}, nil
}