package anthropic

import (
	"cmp"
	"context"
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"maps"
	"strings"

	"github.com/anthropics/anthropic-sdk-go"
	"github.com/anthropics/anthropic-sdk-go/option"
	"github.com/anthropics/anthropic-sdk-go/packages/param"
	"github.com/charmbracelet/ai/ai"
)

type options struct {
	baseURL string
	apiKey  string
	name    string
	headers map[string]string
	client  option.HTTPClient
}

type provider struct {
	options options
}

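// Option configures a provider created by New.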
type Option = func(*options)

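// New creates an Anthropic provider. By default it targets
// https://api.anthropic.com under the name "anthropic"; both can be
// overridden with options. A minimal usage sketch (the model ID below is
// illustrative):
//
//	p := anthropic.New(anthropic.WithAPIKey(os.Getenv("ANTHROPIC_API_KEY")))
//	model, err := p.LanguageModel("claude-sonnet-4-20250514")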
func New(opts ...Option) ai.Provider {
	options := options{
		headers: map[string]string{},
	}
	for _, o := range opts {
		o(&options)
	}

	options.baseURL = cmp.Or(options.baseURL, "https://api.anthropic.com")
	options.name = cmp.Or(options.name, "anthropic")

	return &provider{options: options}
}

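// WithBaseURL overrides the default API endpoint (https://api.anthropic.com).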
func WithBaseURL(baseURL string) Option {
	return func(o *options) {
		o.baseURL = baseURL
	}
}

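// WithAPIKey sets the API key used to authenticate requests.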
func WithAPIKey(apiKey string) Option {
	return func(o *options) {
		o.apiKey = apiKey
	}
}

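// WithName overrides the provider name (default "anthropic").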
func WithName(name string) Option {
	return func(o *options) {
		o.name = name
	}
}

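// WithHeaders merges extra headers into every request.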
func WithHeaders(headers map[string]string) Option {
	return func(o *options) {
		maps.Copy(o.headers, headers)
	}
}

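// WithHTTPClient sets a custom HTTP client for API requests.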
func WithHTTPClient(client option.HTTPClient) Option {
	return func(o *options) {
		o.client = client
	}
}

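// LanguageModel implements ai.Provider.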
func (a *provider) LanguageModel(modelID string) (ai.LanguageModel, error) {
	anthropicClientOptions := []option.RequestOption{}
	if a.options.apiKey != "" {
		anthropicClientOptions = append(anthropicClientOptions, option.WithAPIKey(a.options.apiKey))
	}
	if a.options.baseURL != "" {
		anthropicClientOptions = append(anthropicClientOptions, option.WithBaseURL(a.options.baseURL))
	}

	for key, value := range a.options.headers {
		anthropicClientOptions = append(anthropicClientOptions, option.WithHeader(key, value))
	}

	if a.options.client != nil {
		anthropicClientOptions = append(anthropicClientOptions, option.WithHTTPClient(a.options.client))
	}
	return languageModel{
		modelID:  modelID,
		provider: fmt.Sprintf("%s.messages", a.options.name),
		options:  a.options,
		client:   anthropic.NewClient(anthropicClientOptions...),
	}, nil
}

type languageModel struct {
	provider string
	modelID  string
	client   anthropic.Client
	options  options
}

// Model implements ai.LanguageModel.
func (a languageModel) Model() string {
	return a.modelID
}

// Provider implements ai.LanguageModel.
func (a languageModel) Provider() string {
	return a.provider
}

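// prepareParams translates an ai.Call into Anthropic message parameters,
// collecting warnings for settings the API does not support.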
func (a languageModel) prepareParams(call ai.Call) (*anthropic.MessageNewParams, []ai.CallWarning, error) {
	params := &anthropic.MessageNewParams{}
	providerOptions := &ProviderOptions{}
	if v, ok := call.ProviderOptions["anthropic"]; ok {
		providerOptions, ok = v.(*ProviderOptions)
		if !ok {
			return nil, nil, ai.NewInvalidArgumentError("providerOptions", "anthropic provider options should be *anthropic.ProviderOptions", nil)
		}
	}
	sendReasoning := true
	if providerOptions.SendReasoning != nil {
		sendReasoning = *providerOptions.SendReasoning
	}
	systemBlocks, messages, warnings := toPrompt(call.Prompt, sendReasoning)

	if call.FrequencyPenalty != nil {
		warnings = append(warnings, ai.CallWarning{
			Type:    ai.CallWarningTypeUnsupportedSetting,
			Setting: "FrequencyPenalty",
		})
	}
	if call.PresencePenalty != nil {
		warnings = append(warnings, ai.CallWarning{
			Type:    ai.CallWarningTypeUnsupportedSetting,
			Setting: "PresencePenalty",
		})
	}

	params.System = systemBlocks
	params.Messages = messages
	params.Model = anthropic.Model(a.modelID)
	params.MaxTokens = 4096

	if call.MaxOutputTokens != nil {
		params.MaxTokens = *call.MaxOutputTokens
	}

	if call.Temperature != nil {
		params.Temperature = param.NewOpt(*call.Temperature)
	}
	if call.TopK != nil {
		params.TopK = param.NewOpt(*call.TopK)
	}
	if call.TopP != nil {
		params.TopP = param.NewOpt(*call.TopP)
	}

	isThinking := false
	var thinkingBudget int64
	if providerOptions.Thinking != nil {
		isThinking = true
		thinkingBudget = providerOptions.Thinking.BudgetTokens
	}
	if isThinking {
		if thinkingBudget == 0 {
			return nil, nil, ai.NewUnsupportedFunctionalityError("thinking requires a budget greater than 0", "")
		}
		params.Thinking = anthropic.ThinkingConfigParamOfEnabled(thinkingBudget)
		// Sampling parameters are incompatible with extended thinking; unset
		// them and warn instead of failing the call.
		if call.Temperature != nil {
			params.Temperature = param.Opt[float64]{}
			warnings = append(warnings, ai.CallWarning{
				Type:    ai.CallWarningTypeUnsupportedSetting,
				Setting: "temperature",
				Details: "temperature is not supported when thinking is enabled",
			})
		}
		if call.TopP != nil {
			params.TopP = param.Opt[float64]{}
			warnings = append(warnings, ai.CallWarning{
				Type:    ai.CallWarningTypeUnsupportedSetting,
				Setting: "TopP",
				Details: "TopP is not supported when thinking is enabled",
			})
		}
		if call.TopK != nil {
			params.TopK = param.Opt[int64]{}
			warnings = append(warnings, ai.CallWarning{
				Type:    ai.CallWarningTypeUnsupportedSetting,
				Setting: "TopK",
				Details: "TopK is not supported when thinking is enabled",
			})
		}
		// Thinking tokens count against max_tokens, so reserve room for them.
		params.MaxTokens += thinkingBudget
	}

	if len(call.Tools) > 0 {
		disableParallelToolUse := false
		if providerOptions.DisableParallelToolUse != nil {
			disableParallelToolUse = *providerOptions.DisableParallelToolUse
		}
		tools, toolChoice, toolWarnings := toTools(call.Tools, call.ToolChoice, disableParallelToolUse)
		params.Tools = tools
		if toolChoice != nil {
			params.ToolChoice = *toolChoice
		}
		warnings = append(warnings, toolWarnings...)
	}

	return params, warnings, nil
}

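// getCacheControl extracts Anthropic cache-control settings from provider
// options, if present.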
func getCacheControl(providerOptions ai.ProviderOptions) *CacheControl {
	if anthropicOptions, ok := providerOptions["anthropic"]; ok {
		if options, ok := anthropicOptions.(*CacheControl); ok {
			return options
		}
	}
	return nil
}

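// getReasoningMetadata extracts reasoning metadata (thinking signature or
// redacted data) from provider options, if present.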
func getReasoningMetadata(providerOptions ai.ProviderOptions) *ReasoningMetadata {
	if anthropicOptions, ok := providerOptions["anthropic"]; ok {
		if reasoning, ok := anthropicOptions.(*ReasoningMetadata); ok {
			return reasoning
		}
	}
	return nil
}

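// messageBlock groups consecutive prompt messages that map to the same
// Anthropic role.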
type messageBlock struct {
	Role     ai.MessageRole
	Messages []ai.Message
}

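// groupIntoBlocks merges consecutive messages of the same role into blocks.
// Tool messages are grouped with user messages, since the API expects tool
// results inside user turns.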
func groupIntoBlocks(prompt ai.Prompt) []*messageBlock {
	var blocks []*messageBlock

	var currentBlock *messageBlock

	for _, msg := range prompt {
		var role ai.MessageRole
		switch msg.Role {
		case ai.MessageRoleSystem, ai.MessageRoleUser, ai.MessageRoleAssistant:
			role = msg.Role
		case ai.MessageRoleTool:
			role = ai.MessageRoleUser
		default:
			continue
		}
		if currentBlock == nil || currentBlock.Role != role {
			currentBlock = &messageBlock{
				Role:     role,
				Messages: []ai.Message{},
			}
			blocks = append(blocks, currentBlock)
		}
		currentBlock.Messages = append(currentBlock.Messages, msg)
	}
	return blocks
}

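// toTools converts tool definitions and the tool choice into Anthropic tool
// parameters, warning on unsupported tools.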
func toTools(tools []ai.Tool, toolChoice *ai.ToolChoice, disableParallelToolCalls bool) (anthropicTools []anthropic.ToolUnionParam, anthropicToolChoice *anthropic.ToolChoiceUnionParam, warnings []ai.CallWarning) {
	for _, tool := range tools {
		if tool.GetType() == ai.ToolTypeFunction {
			ft, ok := tool.(ai.FunctionTool)
			if !ok {
				continue
			}
			required := []string{}
			var properties any
			if props, ok := ft.InputSchema["properties"]; ok {
				properties = props
			}
			// The schema may be built in Go ([]string) or decoded from JSON
			// ([]any); accept both shapes for "required".
			if req, ok := ft.InputSchema["required"]; ok {
				switch reqVal := req.(type) {
				case []string:
					required = reqVal
				case []any:
					for _, r := range reqVal {
						if s, ok := r.(string); ok {
							required = append(required, s)
						}
					}
				}
			}
			cacheControl := getCacheControl(ft.ProviderOptions)

			anthropicTool := anthropic.ToolParam{
				Name:        ft.Name,
				Description: anthropic.String(ft.Description),
				InputSchema: anthropic.ToolInputSchemaParam{
					Properties: properties,
					Required:   required,
				},
			}
			if cacheControl != nil {
				anthropicTool.CacheControl = anthropic.NewCacheControlEphemeralParam()
			}
			anthropicTools = append(anthropicTools, anthropic.ToolUnionParam{OfTool: &anthropicTool})
			continue
		}
		// TODO: handle provider tool calls
		warnings = append(warnings, ai.CallWarning{
			Type:    ai.CallWarningTypeUnsupportedTool,
			Tool:    tool,
			Message: "tool is not supported",
		})
	}
	if toolChoice == nil {
		if disableParallelToolCalls {
			anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
				OfAuto: &anthropic.ToolChoiceAutoParam{
					Type:                   "auto",
					DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
				},
			}
		}
		return anthropicTools, anthropicToolChoice, warnings
	}

	switch *toolChoice {
	case ai.ToolChoiceAuto:
		anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
			OfAuto: &anthropic.ToolChoiceAutoParam{
				Type:                   "auto",
				DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
			},
		}
	case ai.ToolChoiceRequired:
		anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
			OfAny: &anthropic.ToolChoiceAnyParam{
				Type:                   "any",
				DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
			},
		}
	case ai.ToolChoiceNone:
		return anthropicTools, anthropicToolChoice, warnings
	default:
		// Any other value names a specific tool the model must call.
		anthropicToolChoice = &anthropic.ToolChoiceUnionParam{
			OfTool: &anthropic.ToolChoiceToolParam{
				Type:                   "tool",
				Name:                   string(*toolChoice),
				DisableParallelToolUse: param.NewOpt(disableParallelToolCalls),
			},
		}
	}
	return anthropicTools, anthropicToolChoice, warnings
}

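// toPrompt converts the generic prompt into system text blocks and Anthropic
// messages, applying cache control and reasoning metadata where available.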
func toPrompt(prompt ai.Prompt, sendReasoningData bool) ([]anthropic.TextBlockParam, []anthropic.MessageParam, []ai.CallWarning) {
	var systemBlocks []anthropic.TextBlockParam
	var messages []anthropic.MessageParam
	var warnings []ai.CallWarning

	blocks := groupIntoBlocks(prompt)
	finishedSystemBlock := false
	for _, block := range blocks {
		switch block.Role {
		case ai.MessageRoleSystem:
			if finishedSystemBlock {
				// Skip additional system messages that are separated by
				// user/assistant messages.
				// TODO: see if we need to send an error here?
				continue
			}
			finishedSystemBlock = true
			for _, msg := range block.Messages {
				for _, part := range msg.Content {
					cacheControl := getCacheControl(part.Options())
					text, ok := ai.AsMessagePart[ai.TextPart](part)
					if !ok {
						continue
					}
					textBlock := anthropic.TextBlockParam{
						Text: text.Text,
					}
					if cacheControl != nil {
						textBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
					}
					systemBlocks = append(systemBlocks, textBlock)
				}
			}

		case ai.MessageRoleUser:
			var anthropicContent []anthropic.ContentBlockParamUnion
			for _, msg := range block.Messages {
				if msg.Role == ai.MessageRoleUser {
					for i, part := range msg.Content {
						// Part-level cache control wins; message-level cache
						// control applies to the last part only.
						isLastPart := i == len(msg.Content)-1
						cacheControl := getCacheControl(part.Options())
						if cacheControl == nil && isLastPart {
							cacheControl = getCacheControl(msg.ProviderOptions)
						}
						switch part.GetType() {
						case ai.ContentTypeText:
							text, ok := ai.AsMessagePart[ai.TextPart](part)
							if !ok {
								continue
							}
							textBlock := &anthropic.TextBlockParam{
								Text: text.Text,
							}
							if cacheControl != nil {
								textBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
							}
							anthropicContent = append(anthropicContent, anthropic.ContentBlockParamUnion{
								OfText: textBlock,
							})
						case ai.ContentTypeFile:
							file, ok := ai.AsMessagePart[ai.FilePart](part)
							if !ok {
								continue
							}
							// TODO: handle other file types
							if !strings.HasPrefix(file.MediaType, "image/") {
								continue
							}

							base64Encoded := base64.StdEncoding.EncodeToString(file.Data)
							imageBlock := anthropic.NewImageBlockBase64(file.MediaType, base64Encoded)
							if cacheControl != nil {
								imageBlock.OfImage.CacheControl = anthropic.NewCacheControlEphemeralParam()
							}
							anthropicContent = append(anthropicContent, imageBlock)
						}
					}
				} else if msg.Role == ai.MessageRoleTool {
					for i, part := range msg.Content {
						isLastPart := i == len(msg.Content)-1
						cacheControl := getCacheControl(part.Options())
						if cacheControl == nil && isLastPart {
							cacheControl = getCacheControl(msg.ProviderOptions)
						}
						result, ok := ai.AsMessagePart[ai.ToolResultPart](part)
						if !ok {
							continue
						}
						toolResultBlock := anthropic.ToolResultBlockParam{
							ToolUseID: result.ToolCallID,
						}
						switch result.Output.GetType() {
						case ai.ToolResultContentTypeText:
							content, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentText](result.Output)
							if !ok {
								continue
							}
							toolResultBlock.Content = []anthropic.ToolResultBlockParamContentUnion{
								{
									OfText: &anthropic.TextBlockParam{
										Text: content.Text,
									},
								},
							}
						case ai.ToolResultContentTypeMedia:
							content, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentMedia](result.Output)
							if !ok {
								continue
							}
							toolResultBlock.Content = []anthropic.ToolResultBlockParamContentUnion{
								{
									OfImage: anthropic.NewImageBlockBase64(content.MediaType, content.Data).OfImage,
								},
							}
						case ai.ToolResultContentTypeError:
							content, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentError](result.Output)
							if !ok {
								continue
							}
							toolResultBlock.Content = []anthropic.ToolResultBlockParamContentUnion{
								{
									OfText: &anthropic.TextBlockParam{
										Text: content.Error.Error(),
									},
								},
							}
							toolResultBlock.IsError = param.NewOpt(true)
						}
						if cacheControl != nil {
							toolResultBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
						}
						anthropicContent = append(anthropicContent, anthropic.ContentBlockParamUnion{
							OfToolResult: &toolResultBlock,
						})
					}
				}
			}
			messages = append(messages, anthropic.NewUserMessage(anthropicContent...))
		case ai.MessageRoleAssistant:
			var anthropicContent []anthropic.ContentBlockParamUnion
			for _, msg := range block.Messages {
				for i, part := range msg.Content {
					isLastPart := i == len(msg.Content)-1
					cacheControl := getCacheControl(part.Options())
					if cacheControl == nil && isLastPart {
						cacheControl = getCacheControl(msg.ProviderOptions)
					}
					switch part.GetType() {
					case ai.ContentTypeText:
						text, ok := ai.AsMessagePart[ai.TextPart](part)
						if !ok {
							continue
						}
						textBlock := &anthropic.TextBlockParam{
							Text: text.Text,
						}
						if cacheControl != nil {
							textBlock.CacheControl = anthropic.NewCacheControlEphemeralParam()
						}
						anthropicContent = append(anthropicContent, anthropic.ContentBlockParamUnion{
							OfText: textBlock,
						})
					case ai.ContentTypeReasoning:
						reasoning, ok := ai.AsMessagePart[ai.ReasoningPart](part)
						if !ok {
							continue
						}
						if !sendReasoningData {
							warnings = append(warnings, ai.CallWarning{
								Type:    "other",
								Message: "sending reasoning content is disabled for this model",
							})
							continue
						}
						reasoningMetadata := getReasoningMetadata(part.Options())
						if reasoningMetadata == nil {
							warnings = append(warnings, ai.CallWarning{
								Type:    "other",
								Message: "unsupported reasoning metadata",
							})
							continue
						}

						if reasoningMetadata.Signature != "" {
							anthropicContent = append(anthropicContent, anthropic.NewThinkingBlock(reasoningMetadata.Signature, reasoning.Text))
						} else if reasoningMetadata.RedactedData != "" {
							anthropicContent = append(anthropicContent, anthropic.NewRedactedThinkingBlock(reasoningMetadata.RedactedData))
						} else {
							warnings = append(warnings, ai.CallWarning{
								Type:    "other",
								Message: "unsupported reasoning metadata",
							})
							continue
						}
					case ai.ContentTypeToolCall:
						toolCall, ok := ai.AsMessagePart[ai.ToolCallPart](part)
						if !ok {
							continue
						}
						if toolCall.ProviderExecuted {
							// TODO: implement provider executed call
							continue
						}

						var inputMap map[string]any
						if err := json.Unmarshal([]byte(toolCall.Input), &inputMap); err != nil {
							continue
						}
						toolUseBlock := anthropic.NewToolUseBlock(toolCall.ToolCallID, inputMap, toolCall.ToolName)
						if cacheControl != nil {
							toolUseBlock.OfToolUse.CacheControl = anthropic.NewCacheControlEphemeralParam()
						}
						anthropicContent = append(anthropicContent, toolUseBlock)
					case ai.ContentTypeToolResult:
						// TODO: implement provider executed tool result
					}
				}
			}
			messages = append(messages, anthropic.NewAssistantMessage(anthropicContent...))
		}
	}
	return systemBlocks, messages, warnings
}

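// handleError converts SDK errors into ai API call errors with request and
// response details attached.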
func (a languageModel) handleError(err error) error {
	var apiErr *anthropic.Error
	if errors.As(err, &apiErr) {
		requestDump := apiErr.DumpRequest(true)
		responseDump := apiErr.DumpResponse(true)
		headers := map[string]string{}
		for k, h := range apiErr.Response.Header {
			if len(h) == 0 {
				continue
			}
			headers[strings.ToLower(k)] = h[len(h)-1]
		}
		return ai.NewAPICallError(
			apiErr.Error(),
			apiErr.Request.URL.String(),
			string(requestDump),
			apiErr.StatusCode,
			headers,
			string(responseDump),
			apiErr,
			false,
		)
	}
	return err
}

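// mapFinishReason maps Anthropic stop reasons onto ai finish reasons.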
func mapFinishReason(finishReason string) ai.FinishReason {
	switch finishReason {
	case "end_turn", "pause_turn", "stop_sequence":
		return ai.FinishReasonStop
	case "max_tokens":
		return ai.FinishReasonLength
	case "tool_use":
		return ai.FinishReasonToolCalls
	default:
		return ai.FinishReasonUnknown
	}
}

// Generate implements ai.LanguageModel.
func (a languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response, error) {
	params, warnings, err := a.prepareParams(call)
	if err != nil {
		return nil, err
	}
	response, err := a.client.Messages.New(ctx, *params)
	if err != nil {
		return nil, a.handleError(err)
	}

	var content []ai.Content
	for _, block := range response.Content {
		switch block.Type {
		case "text":
			text, ok := block.AsAny().(anthropic.TextBlock)
			if !ok {
				continue
			}
			content = append(content, ai.TextContent{
				Text: text.Text,
			})
		case "thinking":
			reasoning, ok := block.AsAny().(anthropic.ThinkingBlock)
			if !ok {
				continue
			}
			content = append(content, ai.ReasoningContent{
				Text: reasoning.Thinking,
				ProviderMetadata: map[string]any{
					"anthropic": &ReasoningMetadata{
						Signature: reasoning.Signature,
					},
				},
			})
		case "redacted_thinking":
			reasoning, ok := block.AsAny().(anthropic.RedactedThinkingBlock)
			if !ok {
				continue
			}
			content = append(content, ai.ReasoningContent{
				Text: "",
				ProviderMetadata: map[string]any{
					"anthropic": &ReasoningMetadata{
						RedactedData: reasoning.Data,
					},
				},
			})
		case "tool_use":
			toolUse, ok := block.AsAny().(anthropic.ToolUseBlock)
			if !ok {
				continue
			}
			content = append(content, ai.ToolCallContent{
				ToolCallID:       toolUse.ID,
				ToolName:         toolUse.Name,
				Input:            string(toolUse.Input),
				ProviderExecuted: false,
			})
		}
	}

	return &ai.Response{
		Content: content,
		Usage: ai.Usage{
			InputTokens:         response.Usage.InputTokens,
			OutputTokens:        response.Usage.OutputTokens,
			TotalTokens:         response.Usage.InputTokens + response.Usage.OutputTokens,
			CacheCreationTokens: response.Usage.CacheCreationInputTokens,
			CacheReadTokens:     response.Usage.CacheReadInputTokens,
		},
		FinishReason:     mapFinishReason(string(response.StopReason)),
		ProviderMetadata: ai.ProviderMetadata{},
		Warnings:         warnings,
	}, nil
}

// Stream implements ai.LanguageModel.
func (a languageModel) Stream(ctx context.Context, call ai.Call) (ai.StreamResponse, error) {
	params, warnings, err := a.prepareParams(call)
	if err != nil {
		return nil, err
	}

	stream := a.client.Messages.NewStreaming(ctx, *params)
	acc := anthropic.Message{}
	return func(yield func(ai.StreamPart) bool) {
		if len(warnings) > 0 {
			if !yield(ai.StreamPart{
				Type:     ai.StreamPartTypeWarnings,
				Warnings: warnings,
			}) {
				return
			}
		}

		for stream.Next() {
			chunk := stream.Current()
			// Accumulate chunks so completed blocks can be read back on
			// content_block_stop; accumulation is best-effort.
			_ = acc.Accumulate(chunk)
			switch chunk.Type {
			case "content_block_start":
				contentBlockType := chunk.ContentBlock.Type
				switch contentBlockType {
				case "text":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeTextStart,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "thinking":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningStart,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "redacted_thinking":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningStart,
						ID:   fmt.Sprintf("%d", chunk.Index),
						ProviderMetadata: ai.ProviderMetadata{
							"anthropic": &ReasoningMetadata{
								RedactedData: chunk.ContentBlock.Data,
							},
						},
					}) {
						return
					}
				case "tool_use":
					if !yield(ai.StreamPart{
						Type:          ai.StreamPartTypeToolInputStart,
						ID:            chunk.ContentBlock.ID,
						ToolCallName:  chunk.ContentBlock.Name,
						ToolCallInput: "",
					}) {
						return
					}
				}
			case "content_block_stop":
				if int(chunk.Index) >= len(acc.Content) {
					continue
				}
				contentBlock := acc.Content[int(chunk.Index)]
				switch contentBlock.Type {
				case "text":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeTextEnd,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "thinking":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningEnd,
						ID:   fmt.Sprintf("%d", chunk.Index),
					}) {
						return
					}
				case "tool_use":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeToolInputEnd,
						ID:   contentBlock.ID,
					}) {
						return
					}
					// The accumulated block now holds the complete input, so
					// emit the full tool call.
					if !yield(ai.StreamPart{
						Type:          ai.StreamPartTypeToolCall,
						ID:            contentBlock.ID,
						ToolCallName:  contentBlock.Name,
						ToolCallInput: string(contentBlock.Input),
					}) {
						return
					}
				}
			case "content_block_delta":
				switch chunk.Delta.Type {
				case "text_delta":
					if !yield(ai.StreamPart{
						Type:  ai.StreamPartTypeTextDelta,
						ID:    fmt.Sprintf("%d", chunk.Index),
						Delta: chunk.Delta.Text,
					}) {
						return
					}
				case "thinking_delta":
					if !yield(ai.StreamPart{
						Type:  ai.StreamPartTypeReasoningDelta,
						ID:    fmt.Sprintf("%d", chunk.Index),
						Delta: chunk.Delta.Thinking,
					}) {
						return
					}
				case "signature_delta":
					if !yield(ai.StreamPart{
						Type: ai.StreamPartTypeReasoningDelta,
						ID:   fmt.Sprintf("%d", chunk.Index),
						ProviderMetadata: ai.ProviderMetadata{
							"anthropic": &ReasoningMetadata{
								Signature: chunk.Delta.Signature,
							},
						},
					}) {
						return
					}
				case "input_json_delta":
					if int(chunk.Index) >= len(acc.Content) {
						continue
					}
					contentBlock := acc.Content[int(chunk.Index)]
					if !yield(ai.StreamPart{
						Type:          ai.StreamPartTypeToolInputDelta,
						ID:            contentBlock.ID,
						ToolCallInput: chunk.Delta.PartialJSON,
					}) {
						return
					}
				}
			case "message_stop":
			}
		}

		err := stream.Err()
		if err != nil && !errors.Is(err, io.EOF) {
			yield(ai.StreamPart{
				Type:  ai.StreamPartTypeError,
				Error: a.handleError(err),
			})
			return
		}
		yield(ai.StreamPart{
			Type:         ai.StreamPartTypeFinish,
			ID:           acc.ID,
			FinishReason: mapFinishReason(string(acc.StopReason)),
			Usage: ai.Usage{
				InputTokens:         acc.Usage.InputTokens,
				OutputTokens:        acc.Usage.OutputTokens,
				TotalTokens:         acc.Usage.InputTokens + acc.Usage.OutputTokens,
				CacheCreationTokens: acc.Usage.CacheCreationInputTokens,
				CacheReadTokens:     acc.Usage.CacheReadInputTokens,
			},
			ProviderMetadata: ai.ProviderMetadata{
				"anthropic": make(map[string]any),
			},
		})
	}, nil
}