// anthropic.rs

  1use std::collections::HashMap;
  2use std::sync::Mutex;
  3
  4use serde::{Deserialize, Serialize};
  5use zed_extension_api::http_client::{HttpMethod, HttpRequest, HttpResponseStream, RedirectPolicy};
  6use zed_extension_api::{self as zed, *};
  7
/// Extension state for the Anthropic LLM provider.
///
/// `Mutex`es are used because the extension trait hands out `&self` in some
/// callbacks while streaming state must be mutated.
struct AnthropicProvider {
    // Active completion streams, keyed by the id returned from
    // `llm_stream_completion_start`.
    streams: Mutex<HashMap<String, StreamState>>,
    // Monotonic counter used to mint unique stream ids.
    next_stream_id: Mutex<u64>,
}
 12
/// Per-stream bookkeeping for one in-flight completion request.
struct StreamState {
    // HTTP response body stream; `Option` so it can be absent once closed.
    response_stream: Option<HttpResponseStream>,
    // Accumulates raw response text until a complete SSE line (terminated by
    // '\n') is available to parse.
    buffer: String,
    // Whether the `Started` event has already been emitted to the host.
    started: bool,
    // Tool-use block currently being assembled from `input_json_delta` events.
    current_tool_use: Option<ToolUseState>,
    // Stop reason captured from `message_delta`, emitted on `message_stop`
    // (or at end of stream).
    stop_reason: Option<LlmStopReason>,
    // Signature from the most recent `signature_delta`, attached to the next
    // completed tool-use block.
    pending_signature: Option<String>,
}
 21
/// A tool-use content block under construction while its input JSON streams in.
struct ToolUseState {
    // Tool-use id assigned by the API.
    id: String,
    // Tool name being invoked.
    name: String,
    // Concatenation of all `input_json_delta` fragments received so far.
    input_json: String,
}
 27
/// Static description of one model entry exposed to the host.
///
/// The `display_name` doubles as the model id the host sends back; several
/// entries can share one `real_id` (the thinking variants).
struct ModelDefinition {
    // Model identifier sent to the Anthropic API.
    real_id: &'static str,
    // Human-readable name; also used as the host-facing model id.
    display_name: &'static str,
    // Context window size advertised to the host.
    max_tokens: u64,
    // Output cap; also used as the request's `max_tokens`.
    max_output_tokens: u64,
    supports_images: bool,
    // When true (and the request allows it), extended thinking is enabled.
    supports_thinking: bool,
    // Marks the host's default model.
    is_default: bool,
    // Marks the host's default "fast" model.
    is_default_fast: bool,
}
 38
/// Catalogue of models offered by this provider.
///
/// Thinking variants share the underlying `real_id` and differ only in
/// `display_name` and `supports_thinking`; `get_model_definition` looks
/// entries up by display name.
// NOTE(review): the 8_192 output caps for the Opus/Sonnet entries look
// conservative — confirm against the current Anthropic model limits.
const MODELS: &[ModelDefinition] = &[
    ModelDefinition {
        real_id: "claude-opus-4-5-20251101",
        display_name: "Claude Opus 4.5",
        max_tokens: 200_000,
        max_output_tokens: 8_192,
        supports_images: true,
        supports_thinking: false,
        is_default: false,
        is_default_fast: false,
    },
    ModelDefinition {
        real_id: "claude-opus-4-5-20251101",
        display_name: "Claude Opus 4.5 Thinking",
        max_tokens: 200_000,
        max_output_tokens: 8_192,
        supports_images: true,
        supports_thinking: true,
        is_default: false,
        is_default_fast: false,
    },
    ModelDefinition {
        real_id: "claude-sonnet-4-5-20250929",
        display_name: "Claude Sonnet 4.5",
        max_tokens: 200_000,
        max_output_tokens: 8_192,
        supports_images: true,
        supports_thinking: false,
        is_default: true,
        is_default_fast: false,
    },
    ModelDefinition {
        real_id: "claude-sonnet-4-5-20250929",
        display_name: "Claude Sonnet 4.5 Thinking",
        max_tokens: 200_000,
        max_output_tokens: 8_192,
        supports_images: true,
        supports_thinking: true,
        is_default: false,
        is_default_fast: false,
    },
    ModelDefinition {
        real_id: "claude-sonnet-4-20250514",
        display_name: "Claude Sonnet 4",
        max_tokens: 200_000,
        max_output_tokens: 8_192,
        supports_images: true,
        supports_thinking: false,
        is_default: false,
        is_default_fast: false,
    },
    ModelDefinition {
        real_id: "claude-sonnet-4-20250514",
        display_name: "Claude Sonnet 4 Thinking",
        max_tokens: 200_000,
        max_output_tokens: 8_192,
        supports_images: true,
        supports_thinking: true,
        is_default: false,
        is_default_fast: false,
    },
    ModelDefinition {
        real_id: "claude-haiku-4-5-20251001",
        display_name: "Claude Haiku 4.5",
        max_tokens: 200_000,
        max_output_tokens: 64_000,
        supports_images: true,
        supports_thinking: false,
        is_default: false,
        is_default_fast: true,
    },
    ModelDefinition {
        real_id: "claude-haiku-4-5-20251001",
        display_name: "Claude Haiku 4.5 Thinking",
        max_tokens: 200_000,
        max_output_tokens: 64_000,
        supports_images: true,
        supports_thinking: true,
        is_default: false,
        is_default_fast: false,
    },
    ModelDefinition {
        real_id: "claude-3-5-sonnet-latest",
        display_name: "Claude 3.5 Sonnet",
        max_tokens: 200_000,
        max_output_tokens: 8_192,
        supports_images: true,
        supports_thinking: false,
        is_default: false,
        is_default_fast: false,
    },
    ModelDefinition {
        real_id: "claude-3-5-haiku-latest",
        display_name: "Claude 3.5 Haiku",
        max_tokens: 200_000,
        max_output_tokens: 8_192,
        supports_images: true,
        supports_thinking: false,
        is_default: false,
        is_default_fast: false,
    },
];
141
142fn get_model_definition(display_name: &str) -> Option<&'static ModelDefinition> {
143    MODELS.iter().find(|m| m.display_name == display_name)
144}
145
146// Anthropic API Request Types
147
/// Request body for `POST /v1/messages`. Field names/attributes are the wire
/// format and must match the Anthropic Messages API.
#[derive(Serialize)]
struct AnthropicRequest {
    model: String,
    max_tokens: u64,
    messages: Vec<AnthropicMessage>,
    // Top-level system prompt (the API takes it outside `messages`).
    #[serde(skip_serializing_if = "Option::is_none")]
    system: Option<String>,
    // Extended-thinking configuration; omitted when thinking is disabled.
    #[serde(skip_serializing_if = "Option::is_none")]
    thinking: Option<AnthropicThinking>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    tools: Vec<AnthropicTool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_choice: Option<AnthropicToolChoice>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    stop_sequences: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f32>,
    // Always true here: this extension only issues streaming requests.
    stream: bool,
}
167
/// Extended-thinking configuration object (`"thinking"` in the request).
#[derive(Serialize)]
struct AnthropicThinking {
    // Serialized as `"type"`; set to "enabled" by `convert_request`.
    #[serde(rename = "type")]
    thinking_type: String,
    // Token budget the model may spend on thinking.
    #[serde(skip_serializing_if = "Option::is_none")]
    budget_tokens: Option<u32>,
}
175
/// One conversation turn: role ("user" or "assistant") plus content blocks.
#[derive(Serialize)]
struct AnthropicMessage {
    role: String,
    content: Vec<AnthropicContent>,
}
181
/// Outbound content block variants; serialized with a `"type"` tag matching
/// the Anthropic content-block schema.
#[derive(Serialize, Clone)]
#[serde(tag = "type")]
enum AnthropicContent {
    #[serde(rename = "text")]
    Text { text: String },
    // Previously emitted thinking, echoed back with its signature so the API
    // can verify it.
    #[serde(rename = "thinking")]
    Thinking { thinking: String, signature: String },
    #[serde(rename = "redacted_thinking")]
    RedactedThinking { data: String },
    #[serde(rename = "image")]
    Image { source: AnthropicImageSource },
    // A tool call previously made by the assistant.
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
    // Result of a tool call, sent in a user turn.
    #[serde(rename = "tool_result")]
    ToolResult {
        tool_use_id: String,
        is_error: bool,
        content: String,
    },
}
206
/// Image payload; this extension always sends `source_type` "base64".
#[derive(Serialize, Clone)]
struct AnthropicImageSource {
    #[serde(rename = "type")]
    source_type: String,
    // MIME type of `data`, e.g. "image/png".
    media_type: String,
    // Base64-encoded image bytes.
    data: String,
}
214
/// Tool definition advertised to the model (JSON Schema input contract).
#[derive(Serialize)]
struct AnthropicTool {
    name: String,
    description: String,
    input_schema: serde_json::Value,
}
221
/// Tool-choice modes; serializes as `{"type": "auto" | "any" | "none"}`.
#[derive(Serialize)]
#[serde(tag = "type", rename_all = "lowercase")]
enum AnthropicToolChoice {
    Auto,
    Any,
    None,
}
229
230// Anthropic API Response Types
231
/// Server-sent events from the streaming Messages API, tagged by `"type"`.
/// Variant names mirror the API's event names one-to-one.
#[derive(Deserialize, Debug)]
#[serde(tag = "type")]
#[allow(dead_code)]
enum AnthropicEvent {
    // First event of a response; may carry initial token usage.
    #[serde(rename = "message_start")]
    MessageStart { message: AnthropicMessageResponse },
    // Opens a new content block at `index`.
    #[serde(rename = "content_block_start")]
    ContentBlockStart {
        index: usize,
        content_block: AnthropicContentBlock,
    },
    // Incremental payload for the block at `index`.
    #[serde(rename = "content_block_delta")]
    ContentBlockDelta { index: usize, delta: AnthropicDelta },
    #[serde(rename = "content_block_stop")]
    ContentBlockStop { index: usize },
    // Carries the stop reason and cumulative output usage.
    #[serde(rename = "message_delta")]
    MessageDelta {
        delta: AnthropicMessageDelta,
        usage: AnthropicUsage,
    },
    #[serde(rename = "message_stop")]
    MessageStop,
    // Keep-alive; carries no data.
    #[serde(rename = "ping")]
    Ping,
    #[serde(rename = "error")]
    Error { error: AnthropicApiError },
}
259
/// Payload of `message_start`; only `usage` is consumed by this extension.
#[derive(Deserialize, Debug)]
struct AnthropicMessageResponse {
    #[allow(dead_code)]
    id: String,
    #[allow(dead_code)]
    role: String,
    // Defaults to all-`None` when the server omits usage.
    #[serde(default)]
    usage: AnthropicUsage,
}
269
/// Initial state of a content block from `content_block_start`.
/// Note: `tool_use` carries only id/name here — the input JSON arrives later
/// via `input_json_delta` events.
#[derive(Deserialize, Debug)]
#[serde(tag = "type")]
enum AnthropicContentBlock {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "thinking")]
    Thinking { thinking: String },
    #[serde(rename = "redacted_thinking")]
    RedactedThinking { data: String },
    #[serde(rename = "tool_use")]
    ToolUse { id: String, name: String },
}
282
/// Incremental updates delivered by `content_block_delta`.
#[derive(Deserialize, Debug)]
#[serde(tag = "type")]
enum AnthropicDelta {
    #[serde(rename = "text_delta")]
    TextDelta { text: String },
    #[serde(rename = "thinking_delta")]
    ThinkingDelta { thinking: String },
    // Cryptographic signature for the preceding thinking block.
    #[serde(rename = "signature_delta")]
    SignatureDelta { signature: String },
    // Fragment of a tool call's input JSON; fragments concatenate in order.
    #[serde(rename = "input_json_delta")]
    InputJsonDelta { partial_json: String },
}
295
/// `delta` payload of a `message_delta` event; only the stop reason is used.
#[derive(Deserialize, Debug)]
struct AnthropicMessageDelta {
    // e.g. "end_turn", "max_tokens", "tool_use"; `None` until known.
    stop_reason: Option<String>,
}
300
/// Token accounting; every field is optional because different events report
/// different subsets. `Default` yields all-`None`.
#[derive(Deserialize, Debug, Default)]
struct AnthropicUsage {
    #[serde(default)]
    input_tokens: Option<u64>,
    #[serde(default)]
    output_tokens: Option<u64>,
    #[serde(default)]
    cache_creation_input_tokens: Option<u64>,
    #[serde(default)]
    cache_read_input_tokens: Option<u64>,
}
312
/// Error payload of an SSE `error` event; only `message` is surfaced.
#[derive(Deserialize, Debug)]
struct AnthropicApiError {
    #[serde(rename = "type")]
    #[allow(dead_code)]
    error_type: String,
    message: String,
}
320
321fn convert_request(
322    model_id: &str,
323    request: &LlmCompletionRequest,
324) -> Result<AnthropicRequest, String> {
325    let model_def =
326        get_model_definition(model_id).ok_or_else(|| format!("Unknown model: {}", model_id))?;
327
328    let mut messages: Vec<AnthropicMessage> = Vec::new();
329    let mut system_message = String::new();
330
331    for msg in &request.messages {
332        match msg.role {
333            LlmMessageRole::System => {
334                for content in &msg.content {
335                    if let LlmMessageContent::Text(text) = content {
336                        if !system_message.is_empty() {
337                            system_message.push('\n');
338                        }
339                        system_message.push_str(text);
340                    }
341                }
342            }
343            LlmMessageRole::User => {
344                let mut contents: Vec<AnthropicContent> = Vec::new();
345
346                for content in &msg.content {
347                    match content {
348                        LlmMessageContent::Text(text) => {
349                            if !text.is_empty() {
350                                contents.push(AnthropicContent::Text { text: text.clone() });
351                            }
352                        }
353                        LlmMessageContent::Image(img) => {
354                            contents.push(AnthropicContent::Image {
355                                source: AnthropicImageSource {
356                                    source_type: "base64".to_string(),
357                                    media_type: "image/png".to_string(),
358                                    data: img.source.clone(),
359                                },
360                            });
361                        }
362                        LlmMessageContent::ToolResult(result) => {
363                            let content_text = match &result.content {
364                                LlmToolResultContent::Text(t) => t.clone(),
365                                LlmToolResultContent::Image(_) => "[Image]".to_string(),
366                            };
367                            contents.push(AnthropicContent::ToolResult {
368                                tool_use_id: result.tool_use_id.clone(),
369                                is_error: result.is_error,
370                                content: content_text,
371                            });
372                        }
373                        _ => {}
374                    }
375                }
376
377                if !contents.is_empty() {
378                    messages.push(AnthropicMessage {
379                        role: "user".to_string(),
380                        content: contents,
381                    });
382                }
383            }
384            LlmMessageRole::Assistant => {
385                let mut contents: Vec<AnthropicContent> = Vec::new();
386
387                for content in &msg.content {
388                    match content {
389                        LlmMessageContent::Text(text) => {
390                            if !text.is_empty() {
391                                contents.push(AnthropicContent::Text { text: text.clone() });
392                            }
393                        }
394                        LlmMessageContent::ToolUse(tool_use) => {
395                            let input: serde_json::Value =
396                                serde_json::from_str(&tool_use.input).unwrap_or_default();
397                            contents.push(AnthropicContent::ToolUse {
398                                id: tool_use.id.clone(),
399                                name: tool_use.name.clone(),
400                                input,
401                            });
402                        }
403                        LlmMessageContent::Thinking(thinking) => {
404                            if !thinking.text.is_empty() {
405                                contents.push(AnthropicContent::Thinking {
406                                    thinking: thinking.text.clone(),
407                                    signature: thinking.signature.clone().unwrap_or_default(),
408                                });
409                            }
410                        }
411                        LlmMessageContent::RedactedThinking(data) => {
412                            if !data.is_empty() {
413                                contents.push(AnthropicContent::RedactedThinking {
414                                    data: data.clone(),
415                                });
416                            }
417                        }
418                        _ => {}
419                    }
420                }
421
422                if !contents.is_empty() {
423                    messages.push(AnthropicMessage {
424                        role: "assistant".to_string(),
425                        content: contents,
426                    });
427                }
428            }
429        }
430    }
431
432    let tools: Vec<AnthropicTool> = request
433        .tools
434        .iter()
435        .map(|t| AnthropicTool {
436            name: t.name.clone(),
437            description: t.description.clone(),
438            input_schema: serde_json::from_str(&t.input_schema)
439                .unwrap_or(serde_json::Value::Object(Default::default())),
440        })
441        .collect();
442
443    let tool_choice = request.tool_choice.as_ref().map(|tc| match tc {
444        LlmToolChoice::Auto => AnthropicToolChoice::Auto,
445        LlmToolChoice::Any => AnthropicToolChoice::Any,
446        LlmToolChoice::None => AnthropicToolChoice::None,
447    });
448
449    let thinking = if model_def.supports_thinking && request.thinking_allowed {
450        Some(AnthropicThinking {
451            thinking_type: "enabled".to_string(),
452            budget_tokens: Some(4096),
453        })
454    } else {
455        None
456    };
457
458    Ok(AnthropicRequest {
459        model: model_def.real_id.to_string(),
460        max_tokens: model_def.max_output_tokens,
461        messages,
462        system: if system_message.is_empty() {
463            None
464        } else {
465            Some(system_message)
466        },
467        thinking,
468        tools,
469        tool_choice,
470        stop_sequences: request.stop_sequences.clone(),
471        temperature: request.temperature,
472        stream: true,
473    })
474}
475
476fn parse_sse_line(line: &str) -> Option<AnthropicEvent> {
477    let data = line.strip_prefix("data: ")?;
478    serde_json::from_str(data).ok()
479}
480
impl zed::Extension for AnthropicProvider {
    /// Creates the extension with no active streams.
    fn new() -> Self {
        Self {
            streams: Mutex::new(HashMap::new()),
            next_stream_id: Mutex::new(0),
        }
    }

    /// Advertises the single "anthropic" provider to the host.
    fn llm_providers(&self) -> Vec<LlmProviderInfo> {
        vec![LlmProviderInfo {
            id: "anthropic".into(),
            name: "Anthropic".into(),
            icon: Some("icons/anthropic.svg".into()),
        }]
    }

    /// Lists every entry in `MODELS`. The display name is used as the model
    /// id, which is how thinking variants of the same underlying model are
    /// told apart when a request comes back in.
    fn llm_provider_models(&self, _provider_id: &str) -> Result<Vec<LlmModelInfo>, String> {
        Ok(MODELS
            .iter()
            .map(|m| LlmModelInfo {
                id: m.display_name.to_string(),
                name: m.display_name.to_string(),
                max_token_count: m.max_tokens,
                max_output_tokens: Some(m.max_output_tokens),
                capabilities: LlmModelCapabilities {
                    supports_images: m.supports_images,
                    supports_tools: true,
                    supports_tool_choice_auto: true,
                    supports_tool_choice_any: true,
                    supports_tool_choice_none: true,
                    supports_thinking: m.supports_thinking,
                    tool_input_format: LlmToolInputFormat::JsonSchema,
                },
                is_default: m.is_default,
                is_default_fast: m.is_default_fast,
            })
            .collect())
    }

    /// Authenticated iff an API key is stored under the "anthropic" credential.
    fn llm_provider_is_authenticated(&self, _provider_id: &str) -> bool {
        llm_get_credential("anthropic").is_some()
    }

    /// Static markdown shown on the provider's settings page.
    fn llm_provider_settings_markdown(&self, _provider_id: &str) -> Option<String> {
        Some(
            r#"# Anthropic Setup

Welcome to **Anthropic**! This extension provides access to Claude models.

## Configuration

Enter your Anthropic API key below. You can get your API key at [console.anthropic.com](https://console.anthropic.com/).

## Available Models

| Display Name | Real Model | Context | Output |
|--------------|------------|---------|--------|
| Claude Opus 4.5 | claude-opus-4-5 | 200K | 8K |
| Claude Opus 4.5 Thinking | claude-opus-4-5 | 200K | 8K |
| Claude Sonnet 4.5 | claude-sonnet-4-5 | 200K | 8K |
| Claude Sonnet 4.5 Thinking | claude-sonnet-4-5 | 200K | 8K |
| Claude Sonnet 4 | claude-sonnet-4 | 200K | 8K |
| Claude Sonnet 4 Thinking | claude-sonnet-4 | 200K | 8K |
| Claude Haiku 4.5 | claude-haiku-4-5 | 200K | 64K |
| Claude Haiku 4.5 Thinking | claude-haiku-4-5 | 200K | 64K |
| Claude 3.5 Sonnet | claude-3-5-sonnet | 200K | 8K |
| Claude 3.5 Haiku | claude-3-5-haiku | 200K | 8K |

## Features

- ✅ Full streaming support
- ✅ Tool/function calling
- ✅ Vision (image inputs)
- ✅ Extended thinking support
- ✅ All Claude models

## Pricing

Uses your Anthropic API credits. See [Anthropic pricing](https://www.anthropic.com/pricing) for details.
"#
            .to_string(),
        )
    }

    /// Prompts the user for an API key via the host's credential UI.
    ///
    /// Returns `Err` when the user cancels (or the host call fails).
    fn llm_provider_authenticate(&mut self, _provider_id: &str) -> Result<(), String> {
        let provided = llm_request_credential(
            "anthropic",
            LlmCredentialType::ApiKey,
            "Anthropic API Key",
            "sk-ant-...",
        )?;
        if provided {
            Ok(())
        } else {
            Err("Authentication cancelled".to_string())
        }
    }

    /// Deletes the stored API key.
    fn llm_provider_reset_credentials(&mut self, _provider_id: &str) -> Result<(), String> {
        llm_delete_credential("anthropic")
    }

    /// Starts a streaming completion: builds the Anthropic request, opens the
    /// SSE HTTP stream, and registers a fresh `StreamState` under a newly
    /// minted stream id, which is returned to the host.
    ///
    /// Fails when no API key is stored, the model id is unknown, request
    /// serialization fails, or the HTTP request cannot be started.
    fn llm_stream_completion_start(
        &mut self,
        _provider_id: &str,
        model_id: &str,
        request: &LlmCompletionRequest,
    ) -> Result<String, String> {
        let api_key = llm_get_credential("anthropic").ok_or_else(|| {
            "No API key configured. Please add your Anthropic API key in settings.".to_string()
        })?;

        let anthropic_request = convert_request(model_id, request)?;

        let body = serde_json::to_vec(&anthropic_request)
            .map_err(|e| format!("Failed to serialize request: {}", e))?;

        let http_request = HttpRequest {
            method: HttpMethod::Post,
            url: "https://api.anthropic.com/v1/messages".to_string(),
            headers: vec![
                ("Content-Type".to_string(), "application/json".to_string()),
                ("x-api-key".to_string(), api_key),
                ("anthropic-version".to_string(), "2023-06-01".to_string()),
            ],
            body: Some(body),
            redirect_policy: RedirectPolicy::FollowAll,
        };

        let response_stream = http_request
            .fetch_stream()
            .map_err(|e| format!("HTTP request failed: {}", e))?;

        // Mint a unique id under the counter lock, then release it before
        // taking the streams lock.
        let stream_id = {
            let mut id_counter = self.next_stream_id.lock().unwrap();
            let id = format!("anthropic-stream-{}", *id_counter);
            *id_counter += 1;
            id
        };

        self.streams.lock().unwrap().insert(
            stream_id.clone(),
            StreamState {
                response_stream: Some(response_stream),
                buffer: String::new(),
                started: false,
                current_tool_use: None,
                stop_reason: None,
                pending_signature: None,
            },
        );

        Ok(stream_id)
    }

    /// Pulls the next completion event for `stream_id`.
    ///
    /// Emits `Started` exactly once, then drains complete SSE lines from the
    /// buffer, translating Anthropic events into host `LlmCompletionEvent`s;
    /// when no complete line is buffered it reads more bytes from the HTTP
    /// stream. Returns `Ok(None)` once the stream is exhausted.
    fn llm_stream_completion_next(
        &mut self,
        stream_id: &str,
    ) -> Result<Option<LlmCompletionEvent>, String> {
        let mut streams = self.streams.lock().unwrap();
        let state = streams
            .get_mut(stream_id)
            .ok_or_else(|| format!("Unknown stream: {}", stream_id))?;

        // First call: signal start before touching the network.
        if !state.started {
            state.started = true;
            return Ok(Some(LlmCompletionEvent::Started));
        }

        let response_stream = state
            .response_stream
            .as_mut()
            .ok_or_else(|| "Stream already closed".to_string())?;

        loop {
            // Drain one complete SSE line from the buffer, if available.
            if let Some(newline_pos) = state.buffer.find('\n') {
                let line = state.buffer[..newline_pos].to_string();
                state.buffer = state.buffer[newline_pos + 1..].to_string();

                // Skip blank separator lines and `event:` lines — the event
                // type is repeated inside the `data:` JSON payload.
                if line.trim().is_empty() || line.starts_with("event:") {
                    continue;
                }

                if let Some(event) = parse_sse_line(&line) {
                    match event {
                        AnthropicEvent::MessageStart { message } => {
                            // Initial usage is only forwarded when both token
                            // counts are present; otherwise fall through and
                            // keep reading.
                            if let (Some(input), Some(output)) =
                                (message.usage.input_tokens, message.usage.output_tokens)
                            {
                                return Ok(Some(LlmCompletionEvent::Usage(LlmTokenUsage {
                                    input_tokens: input,
                                    output_tokens: output,
                                    cache_creation_input_tokens: message
                                        .usage
                                        .cache_creation_input_tokens,
                                    cache_read_input_tokens: message.usage.cache_read_input_tokens,
                                })));
                            }
                        }
                        AnthropicEvent::ContentBlockStart { content_block, .. } => {
                            match content_block {
                                AnthropicContentBlock::Text { text } => {
                                    if !text.is_empty() {
                                        return Ok(Some(LlmCompletionEvent::Text(text)));
                                    }
                                }
                                AnthropicContentBlock::Thinking { thinking } => {
                                    return Ok(Some(LlmCompletionEvent::Thinking(
                                        LlmThinkingContent {
                                            text: thinking,
                                            signature: None,
                                        },
                                    )));
                                }
                                AnthropicContentBlock::RedactedThinking { data } => {
                                    return Ok(Some(LlmCompletionEvent::RedactedThinking(data)));
                                }
                                AnthropicContentBlock::ToolUse { id, name } => {
                                    // Input JSON arrives via input_json_delta;
                                    // start accumulating and emit the tool use
                                    // when the block stops.
                                    state.current_tool_use = Some(ToolUseState {
                                        id,
                                        name,
                                        input_json: String::new(),
                                    });
                                }
                            }
                        }
                        AnthropicEvent::ContentBlockDelta { delta, .. } => match delta {
                            AnthropicDelta::TextDelta { text } => {
                                if !text.is_empty() {
                                    return Ok(Some(LlmCompletionEvent::Text(text)));
                                }
                            }
                            AnthropicDelta::ThinkingDelta { thinking } => {
                                return Ok(Some(LlmCompletionEvent::Thinking(
                                    LlmThinkingContent {
                                        text: thinking,
                                        signature: None,
                                    },
                                )));
                            }
                            AnthropicDelta::SignatureDelta { signature } => {
                                // Remember the signature for the next tool-use
                                // block, and surface it to the host now.
                                state.pending_signature = Some(signature.clone());
                                return Ok(Some(LlmCompletionEvent::Thinking(
                                    LlmThinkingContent {
                                        text: String::new(),
                                        signature: Some(signature),
                                    },
                                )));
                            }
                            AnthropicDelta::InputJsonDelta { partial_json } => {
                                // Silently dropped if no tool-use block is open.
                                if let Some(ref mut tool_use) = state.current_tool_use {
                                    tool_use.input_json.push_str(&partial_json);
                                }
                            }
                        },
                        AnthropicEvent::ContentBlockStop { .. } => {
                            // A finished tool-use block is emitted as one event,
                            // carrying the most recent thinking signature if any.
                            // Stops of other block kinds produce no event.
                            if let Some(tool_use) = state.current_tool_use.take() {
                                return Ok(Some(LlmCompletionEvent::ToolUse(LlmToolUse {
                                    id: tool_use.id,
                                    name: tool_use.name,
                                    input: tool_use.input_json,
                                    thought_signature: state.pending_signature.take(),
                                })));
                            }
                        }
                        AnthropicEvent::MessageDelta { delta, usage } => {
                            // Stash the stop reason; it is emitted on
                            // message_stop (or end of stream). Unknown reasons
                            // map to EndTurn.
                            if let Some(reason) = delta.stop_reason {
                                state.stop_reason = Some(match reason.as_str() {
                                    "end_turn" => LlmStopReason::EndTurn,
                                    "max_tokens" => LlmStopReason::MaxTokens,
                                    "tool_use" => LlmStopReason::ToolUse,
                                    _ => LlmStopReason::EndTurn,
                                });
                            }
                            if let Some(output) = usage.output_tokens {
                                return Ok(Some(LlmCompletionEvent::Usage(LlmTokenUsage {
                                    input_tokens: usage.input_tokens.unwrap_or(0),
                                    output_tokens: output,
                                    cache_creation_input_tokens: usage.cache_creation_input_tokens,
                                    cache_read_input_tokens: usage.cache_read_input_tokens,
                                })));
                            }
                        }
                        AnthropicEvent::MessageStop => {
                            if let Some(stop_reason) = state.stop_reason.take() {
                                return Ok(Some(LlmCompletionEvent::Stop(stop_reason)));
                            }
                            return Ok(Some(LlmCompletionEvent::Stop(LlmStopReason::EndTurn)));
                        }
                        // Keep-alive; nothing to surface.
                        AnthropicEvent::Ping => {}
                        AnthropicEvent::Error { error } => {
                            return Err(format!("API error: {}", error.message));
                        }
                    }
                }

                // Unparseable or consumed-without-event lines: keep reading.
                continue;
            }

            // No complete line buffered — pull more bytes from the network.
            match response_stream.next_chunk() {
                Ok(Some(chunk)) => {
                    // NOTE(review): from_utf8_lossy will mangle a multi-byte
                    // UTF-8 character that straddles a chunk boundary —
                    // confirm the host delivers chunks on UTF-8 boundaries,
                    // or consider buffering raw bytes instead.
                    let text = String::from_utf8_lossy(&chunk);
                    state.buffer.push_str(&text);
                }
                Ok(None) => {
                    // End of stream: flush a pending stop reason first, then
                    // signal completion with None.
                    if let Some(stop_reason) = state.stop_reason.take() {
                        return Ok(Some(LlmCompletionEvent::Stop(stop_reason)));
                    }
                    return Ok(None);
                }
                Err(e) => {
                    return Err(format!("Stream error: {}", e));
                }
            }
        }
    }

    /// Drops all state for `stream_id`, which also closes the HTTP stream.
    fn llm_stream_completion_close(&mut self, stream_id: &str) {
        self.streams.lock().unwrap().remove(stream_id);
    }
}
802
// Entry point: registers this type as the extension implementation.
zed::register_extension!(AnthropicProvider);