anthropic.go

  1package anthropic
  2
  3import (
  4	"context"
  5	"encoding/base64"
  6	"encoding/json"
  7	"errors"
  8	"fmt"
  9	"io"
 10	"maps"
 11	"strings"
 12
 13	"github.com/anthropics/anthropic-sdk-go"
 14	"github.com/anthropics/anthropic-sdk-go/option"
 15	"github.com/anthropics/anthropic-sdk-go/packages/param"
 16	"github.com/charmbracelet/ai"
 17)
 18
// options holds the configurable settings used to build the provider.
type options struct {
	baseURL string            // API endpoint; defaults to https://api.anthropic.com when empty
	apiKey  string            // credential forwarded to the SDK; omitted when empty
	name    string            // provider name used in the "<name>.messages" identifier; defaults to "anthropic"
	headers map[string]string // extra HTTP headers applied to every request
	client  option.HTTPClient // optional custom HTTP client; SDK default when nil
}
 26
// provider implements ai.Provider backed by the Anthropic Messages API.
type provider struct {
	options options
}
 30
 31type Option = func(*options)
 32
 33func New(opts ...Option) ai.Provider {
 34	options := options{
 35		headers: map[string]string{},
 36	}
 37	for _, o := range opts {
 38		o(&options)
 39	}
 40	if options.baseURL == "" {
 41		options.baseURL = "https://api.anthropic.com"
 42	}
 43
 44	if options.name == "" {
 45		options.name = "anthropic"
 46	}
 47
 48	return &provider{
 49		options: options,
 50	}
 51}
 52
// WithBaseURL overrides the default Anthropic API base URL.
func WithBaseURL(baseURL string) Option {
	return func(o *options) {
		o.baseURL = baseURL
	}
}
 58
// WithAPIKey sets the API key used to authenticate requests.
func WithAPIKey(apiKey string) Option {
	return func(o *options) {
		o.apiKey = apiKey
	}
}
 64
// WithName overrides the provider name reported in model identifiers
// (defaults to "anthropic").
func WithName(name string) Option {
	return func(o *options) {
		o.name = name
	}
}
 70
// WithHeaders merges the given headers into the set sent with every request.
// Later calls overwrite earlier values for the same key.
func WithHeaders(headers map[string]string) Option {
	return func(o *options) {
		maps.Copy(o.headers, headers)
	}
}
 76
// WithHTTPClient sets a custom HTTP client for the underlying SDK.
func WithHTTPClient(client option.HTTPClient) Option {
	return func(o *options) {
		o.client = client
	}
}
 82
 83func (a *provider) LanguageModel(modelID string) (ai.LanguageModel, error) {
 84	anthropicClientOptions := []option.RequestOption{}
 85	if a.options.apiKey != "" {
 86		anthropicClientOptions = append(anthropicClientOptions, option.WithAPIKey(a.options.apiKey))
 87	}
 88	if a.options.baseURL != "" {
 89		anthropicClientOptions = append(anthropicClientOptions, option.WithBaseURL(a.options.baseURL))
 90	}
 91
 92	for key, value := range a.options.headers {
 93		anthropicClientOptions = append(anthropicClientOptions, option.WithHeader(key, value))
 94	}
 95
 96	if a.options.client != nil {
 97		anthropicClientOptions = append(anthropicClientOptions, option.WithHTTPClient(a.options.client))
 98	}
 99	return languageModel{
100		modelID:  modelID,
101		provider: fmt.Sprintf("%s.messages", a.options.name),
102		options:  a.options,
103		client:   anthropic.NewClient(anthropicClientOptions...),
104	}, nil
105}
106
// languageModel is the ai.LanguageModel implementation bound to a single
// Anthropic model ID and a configured SDK client.
type languageModel struct {
	provider string // "<provider name>.messages" identifier
	modelID  string
	client   anthropic.Client
	options  options
}
113
// Model implements ai.LanguageModel. It returns the Anthropic model ID.
func (a languageModel) Model() string {
	return a.modelID
}
118
// Provider implements ai.LanguageModel. It returns the
// "<provider name>.messages" identifier assigned at construction.
func (a languageModel) Provider() string {
	return a.provider
}
123
// prepareParams translates an ai.Call into anthropic.MessageNewParams,
// collecting warnings for settings the Messages API does not support.
// It returns an error only when provider options fail to parse or when
// thinking is requested without a token budget.
func (a languageModel) prepareParams(call ai.Call) (*anthropic.MessageNewParams, []ai.CallWarning, error) {
	params := &anthropic.MessageNewParams{}
	providerOptions := &providerOptions{}
	if v, ok := call.ProviderOptions["anthropic"]; ok {
		err := ai.ParseOptions(v, providerOptions)
		if err != nil {
			return nil, nil, err
		}
	}
	// Reasoning content in the prompt is forwarded unless explicitly disabled.
	sendReasoning := true
	if providerOptions.SendReasoning != nil {
		sendReasoning = *providerOptions.SendReasoning
	}
	systemBlocks, messages, warnings := toPrompt(call.Prompt, sendReasoning)

	// The Messages API has no frequency/presence penalty; warn, don't fail.
	if call.FrequencyPenalty != nil {
		warnings = append(warnings, ai.CallWarning{
			Type:    ai.CallWarningTypeUnsupportedSetting,
			Setting: "FrequencyPenalty",
		})
	}
	if call.PresencePenalty != nil {
		warnings = append(warnings, ai.CallWarning{
			Type:    ai.CallWarningTypeUnsupportedSetting,
			Setting: "PresencePenalty",
		})
	}

	params.System = systemBlocks
	params.Messages = messages
	params.Model = anthropic.Model(a.modelID)
	// MaxTokens is required by the API; default when the caller gave none.
	params.MaxTokens = 4096

	if call.MaxOutputTokens != nil {
		params.MaxTokens = *call.MaxOutputTokens
	}

	if call.Temperature != nil {
		params.Temperature = param.NewOpt(*call.Temperature)
	}
	if call.TopK != nil {
		params.TopK = param.NewOpt(*call.TopK)
	}
	if call.TopP != nil {
		params.TopP = param.NewOpt(*call.TopP)
	}

	isThinking := false
	var thinkingBudget int64
	if providerOptions.Thinking != nil {
		isThinking = true
		thinkingBudget = providerOptions.Thinking.BudgetTokens
	}
	if isThinking {
		if thinkingBudget == 0 {
			return nil, nil, ai.NewUnsupportedFunctionalityError("thinking requires budget", "")
		}
		params.Thinking = anthropic.ThinkingConfigParamOfEnabled(thinkingBudget)
		// Thinking is incompatible with sampling controls; clear any that
		// were set above and warn the caller.
		if call.Temperature != nil {
			params.Temperature = param.Opt[float64]{}
			warnings = append(warnings, ai.CallWarning{
				Type:    ai.CallWarningTypeUnsupportedSetting,
				Setting: "temperature",
				Details: "temperature is not supported when thinking is enabled",
			})
		}
		if call.TopP != nil {
			params.TopP = param.Opt[float64]{}
			warnings = append(warnings, ai.CallWarning{
				Type:    ai.CallWarningTypeUnsupportedSetting,
				Setting: "TopP",
				Details: "TopP is not supported when thinking is enabled",
			})
		}
		if call.TopK != nil {
			params.TopK = param.Opt[int64]{}
			warnings = append(warnings, ai.CallWarning{
				Type:    ai.CallWarningTypeUnsupportedSetting,
				Setting: "TopK",
				Details: "TopK is not supported when thinking is enabled",
			})
		}
		// The thinking budget counts against MaxTokens, so extend the limit
		// to leave the original amount for the visible output.
		params.MaxTokens = params.MaxTokens + thinkingBudget
	}

	if len(call.Tools) > 0 {
		disableParallelToolUse := false
		if providerOptions.DisableParallelToolUse != nil {
			disableParallelToolUse = *providerOptions.DisableParallelToolUse
		}
		tools, toolChoice, toolWarnings := toTools(call.Tools, call.ToolChoice, disableParallelToolUse)
		params.Tools = tools
		if toolChoice != nil {
			params.ToolChoice = *toolChoice
		}
		warnings = append(warnings, toolWarnings...)
	}

	return params, warnings, nil
}
224
225func getCacheControl(providerOptions ai.ProviderOptions) *cacheControlProviderOptions {
226	if anthropicOptions, ok := providerOptions["anthropic"]; ok {
227		if cacheControl, ok := anthropicOptions["cache_control"]; ok {
228			if cc, ok := cacheControl.(map[string]any); ok {
229				cacheControlOption := &cacheControlProviderOptions{}
230				err := ai.ParseOptions(cc, cacheControlOption)
231				if err != nil {
232					return cacheControlOption
233				}
234			}
235		} else if cacheControl, ok := anthropicOptions["cacheControl"]; ok {
236			if cc, ok := cacheControl.(map[string]any); ok {
237				cacheControlOption := &cacheControlProviderOptions{}
238				err := ai.ParseOptions(cc, cacheControlOption)
239				if err != nil {
240					return cacheControlOption
241				}
242			}
243		}
244	}
245	return nil
246}
247
248func getReasoningMetadata(providerOptions ai.ProviderOptions) *reasoningMetadata {
249	if anthropicOptions, ok := providerOptions["anthropic"]; ok {
250		reasoningMetadata := &reasoningMetadata{}
251		err := ai.ParseOptions(anthropicOptions, reasoningMetadata)
252		if err != nil {
253			return reasoningMetadata
254		}
255	}
256	return nil
257}
258
// messageBlock is a run of consecutive prompt messages that share the same
// effective role, used to build one Anthropic message per block.
type messageBlock struct {
	Role     ai.MessageRole
	Messages []ai.Message
}
263
264func groupIntoBlocks(prompt ai.Prompt) []*messageBlock {
265	var blocks []*messageBlock
266
267	var currentBlock *messageBlock
268
269	for _, msg := range prompt {
270		switch msg.Role {
271		case ai.MessageRoleSystem:
272			if currentBlock == nil || currentBlock.Role != ai.MessageRoleSystem {
273				currentBlock = &messageBlock{
274					Role:     ai.MessageRoleSystem,
275					Messages: []ai.Message{},
276				}
277				blocks = append(blocks, currentBlock)
278			}
279			currentBlock.Messages = append(currentBlock.Messages, msg)
280		case ai.MessageRoleUser:
281			if currentBlock == nil || currentBlock.Role != ai.MessageRoleUser {
282				currentBlock = &messageBlock{
283					Role:     ai.MessageRoleUser,
284					Messages: []ai.Message{},
285				}
286				blocks = append(blocks, currentBlock)
287			}
288			currentBlock.Messages = append(currentBlock.Messages, msg)
289		case ai.MessageRoleAssistant:
290			if currentBlock == nil || currentBlock.Role != ai.MessageRoleAssistant {
291				currentBlock = &messageBlock{
292					Role:     ai.MessageRoleAssistant,
293					Messages: []ai.Message{},
294				}
295				blocks = append(blocks, currentBlock)
296			}
297			currentBlock.Messages = append(currentBlock.Messages, msg)
298		case ai.MessageRoleTool:
299			if currentBlock == nil || currentBlock.Role != ai.MessageRoleUser {
300				currentBlock = &messageBlock{
301					Role:     ai.MessageRoleUser,
302					Messages: []ai.Message{},
303				}
304				blocks = append(blocks, currentBlock)
305			}
306			currentBlock.Messages = append(currentBlock.Messages, msg)
307		}
308	}
309	return blocks
310}
311
312func toTools(tools []ai.Tool, toolChoice *ai.ToolChoice, disableParallelToolCalls bool) (anthropicTools []anthropic.ToolUnionParam, anthropicToolChoice *anthropic.ToolChoiceUnionParam, warnings []ai.CallWarning) {
313	for _, tool := range tools {
314		if tool.GetType() == ai.ToolTypeFunction {
315			ft, ok := tool.(ai.FunctionTool)
316			if !ok {
317				continue
318			}
319			required := []string{}
320			var properties any
321			if props, ok := ft.InputSchema["properties"]; ok {
322				properties = props
323			}
324			if req, ok := ft.InputSchema["required"]; ok {
325				if reqArr, ok := req.([]string); ok {
326					required = reqArr
327				}
328			}
329			cacheControl := getCacheControl(ft.ProviderOptions)
330
331			anthropicTool := anthropic.ToolParam{
332				Name:        ft.Name,
333				Description: anthropic.String(ft.Description),
334				InputSchema: anthropic.ToolInputSchemaParam{
335					Properties: properties,
336					Required:   required,
337				},
338			}
339			if cacheControl != nil {
340				anthropicTool.CacheControl = anthropic.NewCacheControlEphemeralParam()
341			}
342			anthropicTools = append(anthropicTools, anthropic.ToolUnionParam{OfTool: &anthropicTool})
343			continue
344		}
345		// TODO: handle provider tool calls
346		warnings = append(warnings, ai.CallWarning{
347			Type:    ai.CallWarningTypeUnsupportedTool,
348			Tool:    tool,
349			Message: "tool is not supported",
350		})
351	}
352	if toolChoice == nil {
353		if disableParallelToolCalls {
354			anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
355				OfAuto: &anthropic.ToolChoiceAutoParam{
356					Type:                   "auto",
357					DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
358				},
359			}
360		}
361		return anthropicTools, anthropicToolChoice, warnings
362	}
363
364	switch *toolChoice {
365	case ai.ToolChoiceAuto:
366		anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
367			OfAuto: &anthropic.ToolChoiceAutoParam{
368				Type:                   "auto",
369				DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
370			},
371		}
372	case ai.ToolChoiceRequired:
373		anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
374			OfAny: &anthropic.ToolChoiceAnyParam{
375				Type:                   "any",
376				DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
377			},
378		}
379	case ai.ToolChoiceNone:
380		return anthropicTools, anthropicToolChoice, warnings
381	default:
382		anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
383			OfTool: &anthropic.ToolChoiceToolParam{
384				Type:                   "tool",
385				Name:                   string(*toolChoice),
386				DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
387			},
388		}
389	}
390	return anthropicTools, anthropicToolChoice, warnings
391}
392
// toPrompt converts the generic prompt into Anthropic system blocks and
// messages. Consecutive messages of the same effective role are merged into
// a single API message (tool results are folded into user messages).
// When sendReasoningData is false, assistant reasoning parts are dropped
// with a warning instead of being forwarded.
func toPrompt(prompt ai.Prompt, sendReasoningData bool) ([]anthropic.TextBlockParam, []anthropic.MessageParam, []ai.CallWarning) {
	var systemBlocks []anthropic.TextBlockParam
	var messages []anthropic.MessageParam
	var warnings []ai.CallWarning

	blocks := groupIntoBlocks(prompt)
	finishedSystemBlock := false
	for _, block := range blocks {
		switch block.Role {
		case ai.MessageRoleSystem:
			if finishedSystemBlock {
				// skip multiple system messages that are separated by user/assistant messages
				// TODO: see if we need to send error here?
				continue
			}
			finishedSystemBlock = true
			// Only text parts are meaningful in system prompts.
			for _, msg := range block.Messages {
				for _, part := range msg.Content {
					cacheControl := getCacheControl(part.Options())
					text, ok := ai.AsMessagePart[ai.TextPart](part)
					if !ok {
						continue
					}
					textBlock := anthropic.TextBlockParam{
						Text: text.Text,
					}
					if cacheControl != nil {
						textBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
					}
					systemBlocks = append(systemBlocks, textBlock)
				}
			}

		case ai.MessageRoleUser:
			// A user block can interleave user messages (text/files) and
			// tool messages (tool results); both become content blocks of
			// one user message.
			var anthropicContent []anthropic.ContentBlockParamUnion
			for _, msg := range block.Messages {
				if msg.Role == ai.MessageRoleUser {
					for i, part := range msg.Content {
						isLastPart := i == len(msg.Content)-1
						// Part-level cache control wins; the message-level
						// setting applies only to the last part.
						cacheControl := getCacheControl(part.Options())
						if cacheControl == nil && isLastPart {
							cacheControl = getCacheControl(msg.ProviderOptions)
						}
						switch part.GetType() {
						case ai.ContentTypeText:
							text, ok := ai.AsMessagePart[ai.TextPart](part)
							if !ok {
								continue
							}
							textBlock := &anthropic.TextBlockParam{
								Text: text.Text,
							}
							if cacheControl != nil {
								textBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
							}
							anthropicContent = append(anthropicContent, anthropic.ContentBlockParamUnion{
								OfText: textBlock,
							})
						case ai.ContentTypeFile:
							file, ok := ai.AsMessagePart[ai.FilePart](part)
							if !ok {
								continue
							}
							// TODO: handle other file types
							if !strings.HasPrefix(file.MediaType, "image/") {
								continue
							}

							base64Encoded := base64.StdEncoding.EncodeToString(file.Data)
							imageBlock := anthropic.NewImageBlockBase64(file.MediaType, base64Encoded)
							if cacheControl != nil {
								imageBlock.OfImage.CacheControl = anthropic.NewCacheControlEphemeralParam()
							}
							anthropicContent = append(anthropicContent, imageBlock)
						}
					}
				} else if msg.Role == ai.MessageRoleTool {
					for i, part := range msg.Content {
						isLastPart := i == len(msg.Content)-1
						cacheControl := getCacheControl(part.Options())
						if cacheControl == nil && isLastPart {
							cacheControl = getCacheControl(msg.ProviderOptions)
						}
						result, ok := ai.AsMessagePart[ai.ToolResultPart](part)
						if !ok {
							continue
						}
						toolResultBlock := anthropic.ToolResultBlockParam{
							ToolUseID: result.ToolCallID,
						}
						switch result.Output.GetType() {
						case ai.ToolResultContentTypeText:
							content, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentText](result.Output)
							if !ok {
								continue
							}
							toolResultBlock.Content = []anthropic.ToolResultBlockParamContentUnion{
								{
									OfText: &anthropic.TextBlockParam{
										Text: content.Text,
									},
								},
							}
						case ai.ToolResultContentTypeMedia:
							content, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentMedia](result.Output)
							if !ok {
								continue
							}
							toolResultBlock.Content = []anthropic.ToolResultBlockParamContentUnion{
								{
									OfImage: anthropic.NewImageBlockBase64(content.MediaType, content.Data).OfImage,
								},
							}
						case ai.ToolResultContentTypeError:
							content, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentError](result.Output)
							if !ok {
								continue
							}
							// Errors become text content flagged with IsError.
							toolResultBlock.Content = []anthropic.ToolResultBlockParamContentUnion{
								{
									OfText: &anthropic.TextBlockParam{
										Text: content.Error.Error(),
									},
								},
							}
							toolResultBlock.IsError = param.NewOpt(true)
						}
						if cacheControl != nil {
							toolResultBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
						}
						anthropicContent = append(anthropicContent, anthropic.ContentBlockParamUnion{
							OfToolResult: &toolResultBlock,
						})
					}
				}
			}
			messages = append(messages, anthropic.NewUserMessage(anthropicContent...))
		case ai.MessageRoleAssistant:
			var anthropicContent []anthropic.ContentBlockParamUnion
			for _, msg := range block.Messages {
				for i, part := range msg.Content {
					isLastPart := i == len(msg.Content)-1
					cacheControl := getCacheControl(part.Options())
					if cacheControl == nil && isLastPart {
						cacheControl = getCacheControl(msg.ProviderOptions)
					}
					switch part.GetType() {
					case ai.ContentTypeText:
						text, ok := ai.AsMessagePart[ai.TextPart](part)
						if !ok {
							continue
						}
						textBlock := &anthropic.TextBlockParam{
							Text: text.Text,
						}
						if cacheControl != nil {
							textBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
						}
						anthropicContent = append(anthropicContent, anthropic.ContentBlockParamUnion{
							OfText: textBlock,
						})
					case ai.ContentTypeReasoning:
						reasoning, ok := ai.AsMessagePart[ai.ReasoningPart](part)
						if !ok {
							continue
						}
						if !sendReasoningData {
							warnings = append(warnings, ai.CallWarning{
								Type:    "other",
								Message: "sending reasoning content is disabled for this model",
							})
							continue
						}
						reasoningMetadata := getReasoningMetadata(part.Options())
						if reasoningMetadata == nil {
							warnings = append(warnings, ai.CallWarning{
								Type:    "other",
								Message: "unsupported reasoning metadata",
							})
							continue
						}

						// A signature marks a normal thinking block; redacted
						// data marks a redacted one. Anything else is dropped.
						if reasoningMetadata.Signature != "" {
							anthropicContent = append(anthropicContent, anthropic.NewThinkingBlock(reasoningMetadata.Signature, reasoning.Text))
						} else if reasoningMetadata.RedactedData != "" {
							anthropicContent = append(anthropicContent, anthropic.NewRedactedThinkingBlock(reasoningMetadata.RedactedData))
						} else {
							warnings = append(warnings, ai.CallWarning{
								Type:    "other",
								Message: "unsupported reasoning metadata",
							})
							continue
						}
					case ai.ContentTypeToolCall:
						toolCall, ok := ai.AsMessagePart[ai.ToolCallPart](part)
						if !ok {
							continue
						}
						if toolCall.ProviderExecuted {
							// TODO: implement provider executed call
							continue
						}

						// Tool input is stored as a JSON string; silently skip
						// parts whose input is not valid JSON.
						var inputMap map[string]any
						err := json.Unmarshal([]byte(toolCall.Input), &inputMap)
						if err != nil {
							continue
						}
						toolUseBlock := anthropic.NewToolUseBlock(toolCall.ToolCallID, inputMap, toolCall.ToolName)
						if cacheControl != nil {
							toolUseBlock.OfToolUse.CacheControl = anthropic.NewCacheControlEphemeralParam()
						}
						anthropicContent = append(anthropicContent, toolUseBlock)
					case ai.ContentTypeToolResult:
						// TODO: implement provider executed tool result
					}
				}
			}
			messages = append(messages, anthropic.NewAssistantMessage(anthropicContent...))
		}
	}
	return systemBlocks, messages, warnings
}
616
617func (o languageModel) handleError(err error) error {
618	var apiErr *anthropic.Error
619	if errors.As(err, &apiErr) {
620		requestDump := apiErr.DumpRequest(true)
621		responseDump := apiErr.DumpResponse(true)
622		headers := map[string]string{}
623		for k, h := range apiErr.Response.Header {
624			v := h[len(h)-1]
625			headers[strings.ToLower(k)] = v
626		}
627		return ai.NewAPICallError(
628			apiErr.Error(),
629			apiErr.Request.URL.String(),
630			string(requestDump),
631			apiErr.StatusCode,
632			headers,
633			string(responseDump),
634			apiErr,
635			false,
636		)
637	}
638	return err
639}
640
641func mapFinishReason(finishReason string) ai.FinishReason {
642	switch finishReason {
643	case "end", "stop_sequence":
644		return ai.FinishReasonStop
645	case "max_tokens":
646		return ai.FinishReasonLength
647	case "tool_use":
648		return ai.FinishReasonToolCalls
649	default:
650		return ai.FinishReasonUnknown
651	}
652}
653
// Generate implements ai.LanguageModel. It performs a single non-streaming
// Messages API call and maps the response content blocks, usage counters,
// and stop reason into an ai.Response.
func (a languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response, error) {
	params, warnings, err := a.prepareParams(call)
	if err != nil {
		return nil, err
	}
	response, err := a.client.Messages.New(ctx, *params)
	if err != nil {
		return nil, a.handleError(err)
	}

	// Map each response block to generic content; unknown block types and
	// blocks that fail the type assertion are skipped.
	var content []ai.Content
	for _, block := range response.Content {
		switch block.Type {
		case "text":
			text, ok := block.AsAny().(anthropic.TextBlock)
			if !ok {
				continue
			}
			content = append(content, ai.TextContent{
				Text: text.Text,
			})
		case "thinking":
			reasoning, ok := block.AsAny().(anthropic.ThinkingBlock)
			if !ok {
				continue
			}
			// The signature is kept in provider metadata so it can be sent
			// back on subsequent turns (see toPrompt).
			content = append(content, ai.ReasoningContent{
				Text: reasoning.Thinking,
				ProviderMetadata: map[string]map[string]any{
					"anthropic": {
						"signature": reasoning.Signature,
					},
				},
			})
		case "redacted_thinking":
			reasoning, ok := block.AsAny().(anthropic.RedactedThinkingBlock)
			if !ok {
				continue
			}
			// Redacted thinking has no visible text; only the opaque data
			// is preserved for round-tripping.
			content = append(content, ai.ReasoningContent{
				Text: "",
				ProviderMetadata: map[string]map[string]any{
					"anthropic": {
						"redacted_data": reasoning.Data,
					},
				},
			})
		case "tool_use":
			toolUse, ok := block.AsAny().(anthropic.ToolUseBlock)
			if !ok {
				continue
			}
			content = append(content, ai.ToolCallContent{
				ToolCallID:       toolUse.ID,
				ToolName:         toolUse.Name,
				Input:            string(toolUse.Input),
				ProviderExecuted: false,
			})
		}
	}

	return &ai.Response{
		Content: content,
		Usage: ai.Usage{
			InputTokens:         response.Usage.InputTokens,
			OutputTokens:        response.Usage.OutputTokens,
			TotalTokens:         response.Usage.InputTokens + response.Usage.OutputTokens,
			CacheCreationTokens: response.Usage.CacheCreationInputTokens,
			CacheReadTokens:     response.Usage.CacheReadInputTokens,
		},
		FinishReason: mapFinishReason(string(response.StopReason)),
		ProviderMetadata: ai.ProviderMetadata{
			"anthropic": make(map[string]any),
		},
		Warnings: warnings,
	}, nil
}
732
// Stream implements ai.LanguageModel. It opens a streaming Messages API
// request and yields ai.StreamPart values as server events arrive. Text and
// reasoning parts are identified by the content-block index; tool-use parts
// by the SDK-provided block ID. The accumulated message (acc) supplies block
// metadata on stop events and final usage/stop-reason on finish.
func (a languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamResponse, error) {
	params, warnings, err := a.prepareParams(call)
	if err != nil {
		return nil, err
	}

	stream := a.client.Messages.NewStreaming(ctx, *params)
	acc := anthropic.Message{}
	return func(yield func(ai.StreamPart) bool) {
		if len(warnings) > 0 {
			if !yield(ai.StreamPart{
				Type:     ai.StreamPartTypeWarnings,
				Warnings: warnings,
			}) {
				return
			}
		}

		for stream.Next() {
			chunk := stream.Current()
			// Accumulation failures are ignored deliberately: a bad chunk
			// should not abort the stream, and the final error (if any) is
			// surfaced via stream.Err() below.
			_ = acc.Accumulate(chunk)
			switch chunk.Type {
			case "content_block_start":
				contentBlockType := chunk.ContentBlock.Type
				switch contentBlockType {
				case "text":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeTextStart,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "thinking":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningStart,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "redacted_thinking":
					// Redacted thinking arrives fully formed in the start
					// event; its opaque payload rides in provider metadata.
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningStart,
						ID:   fmt.Sprintf("%d", chunk.Index),
						ProviderMetadata: ai.ProviderMetadata{
							"anthropic": {
								"redacted_data": chunk.ContentBlock.Data,
							},
						},
					}) {
						return
					}
				case "tool_use":
					if !yield(ai.StreamPart{
						Type:          ai.StreamPartTypeToolInputStart,
						ID:            chunk.ContentBlock.ID,
						ToolCallName:  chunk.ContentBlock.Name,
						ToolCallInput: "",
					}) {
						return
					}
				}
			case "content_block_stop":
				// Guard: the accumulator may not yet contain this index.
				if len(acc.Content)-1 < int(chunk.Index) {
					continue
				}
				contentBlock := acc.Content[int(chunk.Index)]
				switch contentBlock.Type {
				case "text":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeTextEnd,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "thinking":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningEnd,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "tool_use":
					// Close the input stream, then emit the completed tool
					// call with the fully accumulated input JSON.
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeToolInputEnd,
						ID:   contentBlock.ID,
					}) {
						return
					}
					if !yield(ai.StreamPart{
						Type:          ai.StreamPartTypeToolCall,
						ID:            contentBlock.ID,
						ToolCallName:  contentBlock.Name,
						ToolCallInput: string(contentBlock.Input),
					}) {
						return
					}
				}
			case "content_block_delta":
				switch chunk.Delta.Type {
				case "text_delta":
					if !yield(ai.StreamPart{
						Type:  ai.StreamPartTypeTextDelta,
						ID:    fmt.Sprintf("%d", chunk.Index),
						Delta: chunk.Delta.Text,
					}) {
						return
					}
				case "thinking_delta":
					if !yield(ai.StreamPart{
						Type:  ai.StreamPartTypeReasoningDelta,
						ID:    fmt.Sprintf("%d", chunk.Index),
						Delta: chunk.Delta.Text,
					}) {
						return
					}
				case "signature_delta":
					// The thinking signature arrives as a delta with no
					// visible text; forward it via provider metadata.
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningDelta,
						ID:   fmt.Sprintf("%d", chunk.Index),
						ProviderMetadata: ai.ProviderMetadata{
							"anthropic": {
								"signature": chunk.Delta.Signature,
							},
						},
					}) {
						return
					}
				case "input_json_delta":
					if len(acc.Content)-1 < int(chunk.Index) {
						continue
					}
					contentBlock := acc.Content[int(chunk.Index)]
					if !yield(ai.StreamPart{
						Type:          ai.StreamPartTypeToolInputDelta,
						ID:            contentBlock.ID,
						ToolCallInput: chunk.Delta.PartialJSON,
					}) {
						return
					}
				}
			case "message_stop":
			}
		}

		// io.EOF is normal stream termination; anything else is an error.
		err := stream.Err()
		if err == nil || errors.Is(err, io.EOF) {
			yield(ai.StreamPart{
				Type:         ai.StreamPartTypeFinish,
				ID:           acc.ID,
				FinishReason: mapFinishReason(string(acc.StopReason)),
				Usage: ai.Usage{
					InputTokens:         acc.Usage.InputTokens,
					OutputTokens:        acc.Usage.OutputTokens,
					TotalTokens:         acc.Usage.InputTokens + acc.Usage.OutputTokens,
					CacheCreationTokens: acc.Usage.CacheCreationInputTokens,
					CacheReadTokens:     acc.Usage.CacheReadInputTokens,
				},
				ProviderMetadata: ai.ProviderMetadata{
					"anthropic": make(map[string]any),
				},
			})
			return
		} else {
			yield(ai.StreamPart{
				Type:  ai.StreamPartTypeError,
				Error: a.handleError(err),
			})
			return
		}
	}, nil
}