// anthropic.rs — Anthropic language model provider.

  1use crate::AllLanguageModelSettings;
  2use crate::ui::InstructionListItem;
  3use anthropic::{AnthropicError, AnthropicModelMode, ContentDelta, Event, ResponseContent, Usage};
  4use anyhow::{Context as _, Result, anyhow};
  5use collections::{BTreeMap, HashMap};
  6use credentials_provider::CredentialsProvider;
  7use editor::{Editor, EditorElement, EditorStyle};
  8use futures::Stream;
  9use futures::{FutureExt, StreamExt, TryStreamExt as _, future::BoxFuture, stream::BoxStream};
 10use gpui::{
 11    AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
 12};
 13use http_client::HttpClient;
 14use language_model::{
 15    AuthenticateError, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId,
 16    LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
 17    LanguageModelProviderState, LanguageModelRequest, MessageContent, RateLimiter, Role,
 18};
 19use language_model::{LanguageModelCompletionEvent, LanguageModelToolUse, StopReason};
 20use schemars::JsonSchema;
 21use serde::{Deserialize, Serialize};
 22use settings::{Settings, SettingsStore};
 23use std::pin::Pin;
 24use std::str::FromStr;
 25use std::sync::Arc;
 26use strum::IntoEnumIterator;
 27use theme::ThemeSettings;
 28use ui::{Icon, IconName, List, Tooltip, prelude::*};
 29use util::{ResultExt, maybe};
 30
/// Stable provider identifier shared with the rest of the app.
const PROVIDER_ID: &str = language_model::ANTHROPIC_PROVIDER_ID;
/// Human-readable provider name shown in the UI and error messages.
const PROVIDER_NAME: &str = "Anthropic";
 33
/// User-configurable settings for the Anthropic provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct AnthropicSettings {
    /// Base URL of the Anthropic API; also used as the key for stored credentials.
    pub api_url: String,
    /// Extend Zed's list of Anthropic models.
    pub available_models: Vec<AvailableModel>,
    /// Set when the settings were written in a legacy format that needs
    /// migration (inferred from the name — not used in this file).
    pub needs_setting_migration: bool,
}
 41
/// A user-declared model entry, layered on top of the built-in model list.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: usize,
    /// A model `name` to substitute when calling tools, in case the primary model doesn't support tool calling.
    pub tool_override: Option<String>,
    /// Configuration of Anthropic's caching API.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// Upper bound on tokens generated per response.
    pub max_output_tokens: Option<u32>,
    /// Sampling temperature used when a request doesn't specify one.
    pub default_temperature: Option<f32>,
    /// Extra beta headers forwarded to the API (handled by the `anthropic` crate).
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}
 61
/// Serialized form of a model's reasoning mode, as written in settings.
/// Tagged by `type` (lowercase): `"default"` or `"thinking"`.
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ModelMode {
    #[default]
    Default,
    Thinking {
        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
        budget_tokens: Option<u32>,
    },
}
 72
 73impl From<ModelMode> for AnthropicModelMode {
 74    fn from(value: ModelMode) -> Self {
 75        match value {
 76            ModelMode::Default => AnthropicModelMode::Default,
 77            ModelMode::Thinking { budget_tokens } => AnthropicModelMode::Thinking { budget_tokens },
 78        }
 79    }
 80}
 81
 82impl From<AnthropicModelMode> for ModelMode {
 83    fn from(value: AnthropicModelMode) -> Self {
 84        match value {
 85            AnthropicModelMode::Default => ModelMode::Default,
 86            AnthropicModelMode::Thinking { budget_tokens } => ModelMode::Thinking { budget_tokens },
 87        }
 88    }
 89}
 90
/// The Anthropic provider: owns the HTTP client used for API calls and the
/// shared authentication state.
pub struct AnthropicLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}
 95
/// Environment variable consulted first when authenticating.
const ANTHROPIC_API_KEY_VAR: &str = "ANTHROPIC_API_KEY";
 97
/// Shared authentication state for the provider.
pub struct State {
    /// The API key, once loaded from the environment or credentials store.
    api_key: Option<String>,
    /// True when the key came from `ANTHROPIC_API_KEY` rather than stored credentials.
    api_key_from_env: bool,
    /// Keeps the settings observer alive for the lifetime of this state.
    _subscription: Subscription,
}
103
104impl State {
105    fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
106        let credentials_provider = <dyn CredentialsProvider>::global(cx);
107        let api_url = AllLanguageModelSettings::get_global(cx)
108            .anthropic
109            .api_url
110            .clone();
111        cx.spawn(async move |this, cx| {
112            credentials_provider
113                .delete_credentials(&api_url, &cx)
114                .await
115                .ok();
116            this.update(cx, |this, cx| {
117                this.api_key = None;
118                this.api_key_from_env = false;
119                cx.notify();
120            })
121        })
122    }
123
124    fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
125        let credentials_provider = <dyn CredentialsProvider>::global(cx);
126        let api_url = AllLanguageModelSettings::get_global(cx)
127            .anthropic
128            .api_url
129            .clone();
130        cx.spawn(async move |this, cx| {
131            credentials_provider
132                .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
133                .await
134                .ok();
135
136            this.update(cx, |this, cx| {
137                this.api_key = Some(api_key);
138                cx.notify();
139            })
140        })
141    }
142
143    fn is_authenticated(&self) -> bool {
144        self.api_key.is_some()
145    }
146
147    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
148        if self.is_authenticated() {
149            return Task::ready(Ok(()));
150        }
151
152        let credentials_provider = <dyn CredentialsProvider>::global(cx);
153        let api_url = AllLanguageModelSettings::get_global(cx)
154            .anthropic
155            .api_url
156            .clone();
157
158        cx.spawn(async move |this, cx| {
159            let (api_key, from_env) = if let Ok(api_key) = std::env::var(ANTHROPIC_API_KEY_VAR) {
160                (api_key, true)
161            } else {
162                let (_, api_key) = credentials_provider
163                    .read_credentials(&api_url, &cx)
164                    .await?
165                    .ok_or(AuthenticateError::CredentialsNotFound)?;
166                (
167                    String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?,
168                    false,
169                )
170            };
171
172            this.update(cx, |this, cx| {
173                this.api_key = Some(api_key);
174                this.api_key_from_env = from_env;
175                cx.notify();
176            })?;
177
178            Ok(())
179        })
180    }
181}
182
183impl AnthropicLanguageModelProvider {
184    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
185        let state = cx.new(|cx| State {
186            api_key: None,
187            api_key_from_env: false,
188            _subscription: cx.observe_global::<SettingsStore>(|_, cx| {
189                cx.notify();
190            }),
191        });
192
193        Self { http_client, state }
194    }
195}
196
197impl LanguageModelProviderState for AnthropicLanguageModelProvider {
198    type ObservableEntity = State;
199
200    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
201        Some(self.state.clone())
202    }
203}
204
impl LanguageModelProvider for AnthropicLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiAnthropic
    }

    /// The model used when the user hasn't explicitly chosen one.
    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        let model = anthropic::Model::default();
        Some(Arc::new(AnthropicModel {
            id: LanguageModelId::from(model.id().to_string()),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        }))
    }

    /// All models this provider offers: the built-in list plus any the user
    /// declared in settings. A settings entry with the same id as a built-in
    /// model replaces it; results come out sorted by id (BTreeMap order).
    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        // Add base models from anthropic::Model::iter()
        for model in anthropic::Model::iter() {
            if !matches!(model, anthropic::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        // Override with available models from settings
        for model in AllLanguageModelSettings::get_global(cx)
            .anthropic
            .available_models
            .iter()
        {
            models.insert(
                model.name.clone(),
                anthropic::Model::Custom {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                    tool_override: model.tool_override.clone(),
                    cache_configuration: model.cache_configuration.as_ref().map(|config| {
                        anthropic::AnthropicModelCacheConfiguration {
                            max_cache_anchors: config.max_cache_anchors,
                            should_speculate: config.should_speculate,
                            min_total_token: config.min_total_token,
                        }
                    }),
                    max_output_tokens: model.max_output_tokens,
                    default_temperature: model.default_temperature,
                    extra_beta_headers: model.extra_beta_headers.clone(),
                    mode: model.mode.clone().unwrap_or_default().into(),
                },
            );
        }

        // Wrap each resolved model in the trait object the rest of the app uses.
        models
            .into_values()
            .map(|model| {
                Arc::new(AnthropicModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    state: self.state.clone(),
                    http_client: self.http_client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    /// Builds the settings-panel view for entering/resetting the API key.
    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}
298
/// A single Anthropic model exposed through the `LanguageModel` trait.
pub struct AnthropicModel {
    id: LanguageModelId,
    model: anthropic::Model,
    /// Shared auth state (API key) owned by the provider.
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    /// Bounds the number of in-flight requests to this model.
    request_limiter: RateLimiter,
}
306
307pub fn count_anthropic_tokens(
308    request: LanguageModelRequest,
309    cx: &App,
310) -> BoxFuture<'static, Result<usize>> {
311    cx.background_spawn(async move {
312        let messages = request.messages;
313        let mut tokens_from_images = 0;
314        let mut string_messages = Vec::with_capacity(messages.len());
315
316        for message in messages {
317            use language_model::MessageContent;
318
319            let mut string_contents = String::new();
320
321            for content in message.content {
322                match content {
323                    MessageContent::Text(text) => {
324                        string_contents.push_str(&text);
325                    }
326                    MessageContent::Image(image) => {
327                        tokens_from_images += image.estimate_tokens();
328                    }
329                    MessageContent::ToolUse(_tool_use) => {
330                        // TODO: Estimate token usage from tool uses.
331                    }
332                    MessageContent::ToolResult(tool_result) => {
333                        string_contents.push_str(&tool_result.content);
334                    }
335                }
336            }
337
338            if !string_contents.is_empty() {
339                string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
340                    role: match message.role {
341                        Role::User => "user".into(),
342                        Role::Assistant => "assistant".into(),
343                        Role::System => "system".into(),
344                    },
345                    content: Some(string_contents),
346                    name: None,
347                    function_call: None,
348                });
349            }
350        }
351
352        // Tiktoken doesn't yet support these models, so we manually use the
353        // same tokenizer as GPT-4.
354        tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
355            .map(|tokens| tokens + tokens_from_images)
356    })
357    .boxed()
358}
359
impl AnthropicModel {
    /// Starts a streaming completion request against the Anthropic API.
    ///
    /// Reads the API key and URL up front from app state; the returned
    /// future resolves to a stream of raw API events.
    fn stream_completion(
        &self,
        request: anthropic::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<anthropic::Event, AnthropicError>>>>
    {
        let http_client = self.http_client.clone();

        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| {
            let settings = &AllLanguageModelSettings::get_global(cx).anthropic;
            (state.api_key.clone(), settings.api_url.clone())
        }) else {
            // The app (and with it the state entity) has been dropped.
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        async move {
            let api_key = api_key.ok_or_else(|| anyhow!("Missing Anthropic API Key"))?;
            let request =
                anthropic::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            request.await.context("failed to stream completion")
        }
        .boxed()
    }
}
385
386impl LanguageModel for AnthropicModel {
387    fn id(&self) -> LanguageModelId {
388        self.id.clone()
389    }
390
391    fn name(&self) -> LanguageModelName {
392        LanguageModelName::from(self.model.display_name().to_string())
393    }
394
395    fn provider_id(&self) -> LanguageModelProviderId {
396        LanguageModelProviderId(PROVIDER_ID.into())
397    }
398
399    fn provider_name(&self) -> LanguageModelProviderName {
400        LanguageModelProviderName(PROVIDER_NAME.into())
401    }
402
403    fn telemetry_id(&self) -> String {
404        format!("anthropic/{}", self.model.id())
405    }
406
407    fn api_key(&self, cx: &App) -> Option<String> {
408        self.state.read(cx).api_key.clone()
409    }
410
411    fn max_token_count(&self) -> usize {
412        self.model.max_token_count()
413    }
414
415    fn max_output_tokens(&self) -> Option<u32> {
416        Some(self.model.max_output_tokens())
417    }
418
419    fn count_tokens(
420        &self,
421        request: LanguageModelRequest,
422        cx: &App,
423    ) -> BoxFuture<'static, Result<usize>> {
424        count_anthropic_tokens(request, cx)
425    }
426
427    fn stream_completion(
428        &self,
429        request: LanguageModelRequest,
430        cx: &AsyncApp,
431    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<LanguageModelCompletionEvent>>>> {
432        let request = into_anthropic(
433            request,
434            self.model.request_id().into(),
435            self.model.default_temperature(),
436            self.model.max_output_tokens(),
437            self.model.mode(),
438        );
439        let request = self.stream_completion(request, cx);
440        let future = self.request_limiter.stream(async move {
441            let response = request.await.map_err(|err| anyhow!(err))?;
442            Ok(map_to_language_model_completion_events(response))
443        });
444        async move { Ok(future.await?.boxed()) }.boxed()
445    }
446
447    fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
448        self.model
449            .cache_configuration()
450            .map(|config| LanguageModelCacheConfiguration {
451                max_cache_anchors: config.max_cache_anchors,
452                should_speculate: config.should_speculate,
453                min_total_token: config.min_total_token,
454            })
455    }
456
457    fn use_any_tool(
458        &self,
459        request: LanguageModelRequest,
460        tool_name: String,
461        tool_description: String,
462        input_schema: serde_json::Value,
463        cx: &AsyncApp,
464    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
465        let mut request = into_anthropic(
466            request,
467            self.model.tool_model_id().into(),
468            self.model.default_temperature(),
469            self.model.max_output_tokens(),
470            self.model.mode(),
471        );
472        request.tool_choice = Some(anthropic::ToolChoice::Tool {
473            name: tool_name.clone(),
474        });
475        request.tools = vec![anthropic::Tool {
476            name: tool_name.clone(),
477            description: tool_description,
478            input_schema,
479        }];
480
481        let response = self.stream_completion(request, cx);
482        self.request_limiter
483            .run(async move {
484                let response = response.await?;
485                Ok(anthropic::extract_tool_args_from_events(
486                    tool_name,
487                    Box::pin(response.map_err(|e| anyhow!(e))),
488                )
489                .await?
490                .boxed())
491            })
492            .boxed()
493    }
494}
495
/// Converts Zed's provider-agnostic `LanguageModelRequest` into an
/// `anthropic::Request` for `model`.
///
/// - System messages are concatenated (blank-line separated) into the
///   request-level `system` field.
/// - Consecutive user/assistant messages with the same role are merged into
///   a single message.
/// - `mode` controls whether extended thinking is enabled.
pub fn into_anthropic(
    request: LanguageModelRequest,
    model: String,
    default_temperature: f32,
    max_output_tokens: u32,
    mode: AnthropicModelMode,
) -> anthropic::Request {
    let mut new_messages: Vec<anthropic::Message> = Vec::new();
    let mut system_message = String::new();

    for message in request.messages {
        if message.contents_empty() {
            continue;
        }

        match message.role {
            Role::User | Role::Assistant => {
                // A cache-marked message gets an ephemeral cache-control entry
                // on every piece of its content.
                let cache_control = if message.cache {
                    Some(anthropic::CacheControl {
                        cache_type: anthropic::CacheControlType::Ephemeral,
                    })
                } else {
                    None
                };
                let anthropic_message_content: Vec<anthropic::RequestContent> = message
                    .content
                    .into_iter()
                    .filter_map(|content| match content {
                        MessageContent::Text(text) => {
                            // Empty text chunks are dropped entirely.
                            if !text.is_empty() {
                                Some(anthropic::RequestContent::Text {
                                    text,
                                    cache_control,
                                })
                            } else {
                                None
                            }
                        }
                        MessageContent::Image(image) => Some(anthropic::RequestContent::Image {
                            source: anthropic::ImageSource {
                                source_type: "base64".to_string(),
                                media_type: "image/png".to_string(),
                                data: image.source.to_string(),
                            },
                            cache_control,
                        }),
                        MessageContent::ToolUse(tool_use) => {
                            Some(anthropic::RequestContent::ToolUse {
                                id: tool_use.id.to_string(),
                                name: tool_use.name.to_string(),
                                input: tool_use.input,
                                cache_control,
                            })
                        }
                        MessageContent::ToolResult(tool_result) => {
                            Some(anthropic::RequestContent::ToolResult {
                                tool_use_id: tool_result.tool_use_id.to_string(),
                                is_error: tool_result.is_error,
                                content: tool_result.content.to_string(),
                                cache_control,
                            })
                        }
                    })
                    .collect();
                let anthropic_role = match message.role {
                    Role::User => anthropic::Role::User,
                    Role::Assistant => anthropic::Role::Assistant,
                    Role::System => unreachable!("System role should never occur here"),
                };
                // Merge into the previous message when the role repeats.
                if let Some(last_message) = new_messages.last_mut() {
                    if last_message.role == anthropic_role {
                        last_message.content.extend(anthropic_message_content);
                        continue;
                    }
                }
                new_messages.push(anthropic::Message {
                    role: anthropic_role,
                    content: anthropic_message_content,
                });
            }
            Role::System => {
                // All system messages are folded into one string.
                if !system_message.is_empty() {
                    system_message.push_str("\n\n");
                }
                system_message.push_str(&message.string_contents());
            }
        }
    }

    anthropic::Request {
        model,
        messages: new_messages,
        max_tokens: max_output_tokens,
        system: if system_message.is_empty() {
            None
        } else {
            Some(anthropic::StringOrContents::String(system_message))
        },
        thinking: if let AnthropicModelMode::Thinking { budget_tokens } = mode {
            Some(anthropic::Thinking::Enabled { budget_tokens })
        } else {
            None
        },
        tools: request
            .tools
            .into_iter()
            .map(|tool| anthropic::Tool {
                name: tool.name,
                description: tool.description,
                input_schema: tool.input_schema,
            })
            .collect(),
        tool_choice: None,
        metadata: None,
        stop_sequences: Vec::new(),
        // The request's own temperature wins; otherwise use the model default.
        temperature: request.temperature.or(Some(default_temperature)),
        top_k: None,
        top_p: None,
    }
}
616
/// Adapts a stream of raw Anthropic `Event`s into Zed's
/// `LanguageModelCompletionEvent`s.
///
/// Tool-use input arrives as incremental JSON fragments, so it is buffered
/// per content-block index and only parsed (and emitted) once the block
/// stops. Usage counters are accumulated across events and surfaced as
/// `UsageUpdate` events.
pub fn map_to_language_model_completion_events(
    events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent>> {
    /// A tool call whose JSON input is still being streamed in.
    struct RawToolUse {
        id: String,
        name: String,
        input_json: String,
    }

    /// State threaded through the `unfold` below.
    struct State {
        events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
        tool_uses_by_index: HashMap<usize, RawToolUse>,
        usage: Usage,
        stop_reason: StopReason,
    }

    futures::stream::unfold(
        State {
            events,
            tool_uses_by_index: HashMap::default(),
            usage: Usage::default(),
            stop_reason: StopReason::EndTurn,
        },
        |mut state| async move {
            // Loop until an event produces output; events that only mutate
            // state (e.g. buffering tool-use JSON) emit nothing.
            while let Some(event) = state.events.next().await {
                match event {
                    Ok(event) => match event {
                        Event::ContentBlockStart {
                            index,
                            content_block,
                        } => match content_block {
                            ResponseContent::Text { text } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Text(text))],
                                    state,
                                ));
                            }
                            ResponseContent::Thinking { thinking } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking(thinking))],
                                    state,
                                ));
                            }
                            ResponseContent::RedactedThinking { .. } => {
                                // Redacted thinking is encrypted and not accessible to the user, see:
                                // https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#suggestions-for-handling-redacted-thinking-in-production
                            }
                            ResponseContent::ToolUse { id, name, .. } => {
                                // Start buffering this tool call's input JSON.
                                state.tool_uses_by_index.insert(
                                    index,
                                    RawToolUse {
                                        id,
                                        name,
                                        input_json: String::new(),
                                    },
                                );
                            }
                        },
                        Event::ContentBlockDelta { index, delta } => match delta {
                            ContentDelta::TextDelta { text } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Text(text))],
                                    state,
                                ));
                            }
                            ContentDelta::ThinkingDelta { thinking } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking(thinking))],
                                    state,
                                ));
                            }
                            ContentDelta::SignatureDelta { .. } => {}
                            ContentDelta::InputJsonDelta { partial_json } => {
                                // Append the fragment to the buffered tool call.
                                if let Some(tool_use) = state.tool_uses_by_index.get_mut(&index) {
                                    tool_use.input_json.push_str(&partial_json);
                                }
                            }
                        },
                        Event::ContentBlockStop { index } => {
                            // The tool call is complete: parse its buffered
                            // JSON (empty input becomes an empty object).
                            if let Some(tool_use) = state.tool_uses_by_index.remove(&index) {
                                return Some((
                                    vec![maybe!({
                                        Ok(LanguageModelCompletionEvent::ToolUse(
                                            LanguageModelToolUse {
                                                id: tool_use.id.into(),
                                                name: tool_use.name.into(),
                                                input: if tool_use.input_json.is_empty() {
                                                    serde_json::Value::Object(
                                                        serde_json::Map::default(),
                                                    )
                                                } else {
                                                    serde_json::Value::from_str(
                                                        &tool_use.input_json,
                                                    )
                                                    .map_err(|err| anyhow!(err))?
                                                },
                                            },
                                        ))
                                    })],
                                    state,
                                ));
                            }
                        }
                        Event::MessageStart { message } => {
                            update_usage(&mut state.usage, &message.usage);
                            return Some((
                                vec![
                                    Ok(LanguageModelCompletionEvent::StartMessage {
                                        message_id: message.id,
                                    }),
                                    Ok(LanguageModelCompletionEvent::UsageUpdate(convert_usage(
                                        &state.usage,
                                    ))),
                                ],
                                state,
                            ));
                        }
                        Event::MessageDelta { delta, usage } => {
                            update_usage(&mut state.usage, &usage);
                            // Remember the stop reason for the eventual
                            // MessageStop event; unknown values fall back to
                            // EndTurn (logged).
                            if let Some(stop_reason) = delta.stop_reason.as_deref() {
                                state.stop_reason = match stop_reason {
                                    "end_turn" => StopReason::EndTurn,
                                    "max_tokens" => StopReason::MaxTokens,
                                    "tool_use" => StopReason::ToolUse,
                                    _ => {
                                        log::error!(
                                            "Unexpected anthropic stop_reason: {stop_reason}"
                                        );
                                        StopReason::EndTurn
                                    }
                                };
                            }
                            return Some((
                                vec![Ok(LanguageModelCompletionEvent::UsageUpdate(
                                    convert_usage(&state.usage),
                                ))],
                                state,
                            ));
                        }
                        Event::MessageStop => {
                            return Some((
                                vec![Ok(LanguageModelCompletionEvent::Stop(state.stop_reason))],
                                state,
                            ));
                        }
                        Event::Error { error } => {
                            return Some((
                                vec![Err(anyhow!(AnthropicError::ApiError(error)))],
                                state,
                            ));
                        }
                        _ => {}
                    },
                    Err(err) => {
                        return Some((vec![Err(anyhow!(err))], state));
                    }
                }
            }

            None
        },
    )
    .flat_map(futures::stream::iter)
}
781
782/// Updates usage data by preferring counts from `new`.
783fn update_usage(usage: &mut Usage, new: &Usage) {
784    if let Some(input_tokens) = new.input_tokens {
785        usage.input_tokens = Some(input_tokens);
786    }
787    if let Some(output_tokens) = new.output_tokens {
788        usage.output_tokens = Some(output_tokens);
789    }
790    if let Some(cache_creation_input_tokens) = new.cache_creation_input_tokens {
791        usage.cache_creation_input_tokens = Some(cache_creation_input_tokens);
792    }
793    if let Some(cache_read_input_tokens) = new.cache_read_input_tokens {
794        usage.cache_read_input_tokens = Some(cache_read_input_tokens);
795    }
796}
797
798fn convert_usage(usage: &Usage) -> language_model::TokenUsage {
799    language_model::TokenUsage {
800        input_tokens: usage.input_tokens.unwrap_or(0),
801        output_tokens: usage.output_tokens.unwrap_or(0),
802        cache_creation_input_tokens: usage.cache_creation_input_tokens.unwrap_or(0),
803        cache_read_input_tokens: usage.cache_read_input_tokens.unwrap_or(0),
804    }
805}
806
/// Configuration UI for the Anthropic provider: lets the user enter,
/// save, and reset an API key (rendered via the `Render` impl below).
struct ConfigurationView {
    // Single-line editor the user types the not-yet-saved API key into.
    api_key_editor: Entity<Editor>,
    // Shared provider state; consulted for `is_authenticated()` /
    // `api_key_from_env` and updated via `set_api_key` / `reset_api_key`.
    state: gpui::Entity<State>,
    // In-flight credential-loading task spawned in `new`; set back to
    // `None` when loading finishes, which switches the view out of the
    // "Loading credentials..." state.
    load_credentials_task: Option<Task<()>>,
}
812
813impl ConfigurationView {
814    const PLACEHOLDER_TEXT: &'static str = "sk-ant-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
815
816    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
817        cx.observe(&state, |_, _, cx| {
818            cx.notify();
819        })
820        .detach();
821
822        let load_credentials_task = Some(cx.spawn({
823            let state = state.clone();
824            async move |this, cx| {
825                if let Some(task) = state
826                    .update(cx, |state, cx| state.authenticate(cx))
827                    .log_err()
828                {
829                    // We don't log an error, because "not signed in" is also an error.
830                    let _ = task.await;
831                }
832                this.update(cx, |this, cx| {
833                    this.load_credentials_task = None;
834                    cx.notify();
835                })
836                .log_err();
837            }
838        }));
839
840        Self {
841            api_key_editor: cx.new(|cx| {
842                let mut editor = Editor::single_line(window, cx);
843                editor.set_placeholder_text(Self::PLACEHOLDER_TEXT, cx);
844                editor
845            }),
846            state,
847            load_credentials_task,
848        }
849    }
850
851    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
852        let api_key = self.api_key_editor.read(cx).text(cx);
853        if api_key.is_empty() {
854            return;
855        }
856
857        let state = self.state.clone();
858        cx.spawn_in(window, async move |_, cx| {
859            state
860                .update(cx, |state, cx| state.set_api_key(api_key, cx))?
861                .await
862        })
863        .detach_and_log_err(cx);
864
865        cx.notify();
866    }
867
868    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
869        self.api_key_editor
870            .update(cx, |editor, cx| editor.set_text("", window, cx));
871
872        let state = self.state.clone();
873        cx.spawn_in(window, async move |_, cx| {
874            state.update(cx, |state, cx| state.reset_api_key(cx))?.await
875        })
876        .detach_and_log_err(cx);
877
878        cx.notify();
879    }
880
881    fn render_api_key_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
882        let settings = ThemeSettings::get_global(cx);
883        let text_style = TextStyle {
884            color: cx.theme().colors().text,
885            font_family: settings.ui_font.family.clone(),
886            font_features: settings.ui_font.features.clone(),
887            font_fallbacks: settings.ui_font.fallbacks.clone(),
888            font_size: rems(0.875).into(),
889            font_weight: settings.ui_font.weight,
890            font_style: FontStyle::Normal,
891            line_height: relative(1.3),
892            white_space: WhiteSpace::Normal,
893            ..Default::default()
894        };
895        EditorElement::new(
896            &self.api_key_editor,
897            EditorStyle {
898                background: cx.theme().colors().editor_background,
899                local_player: cx.theme().players().local(),
900                text: text_style,
901                ..Default::default()
902            },
903        )
904    }
905
906    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
907        !self.state.read(cx).is_authenticated()
908    }
909}
910
911impl Render for ConfigurationView {
912    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
913        let env_var_set = self.state.read(cx).api_key_from_env;
914
915        if self.load_credentials_task.is_some() {
916            div().child(Label::new("Loading credentials...")).into_any()
917        } else if self.should_render_editor(cx) {
918            v_flex()
919                .size_full()
920                .on_action(cx.listener(Self::save_api_key))
921                .child(Label::new("To use Zed's assistant with Anthropic, you need to add an API key. Follow these steps:"))
922                .child(
923                    List::new()
924                        .child(
925                            InstructionListItem::new(
926                                "Create one by visiting",
927                                Some("Anthropic's settings"),
928                                Some("https://console.anthropic.com/settings/keys")
929                            )
930                        )
931                        .child(
932                            InstructionListItem::text_only("Paste your API key below and hit enter to start using the assistant")
933                        )
934                )
935                .child(
936                    h_flex()
937                        .w_full()
938                        .my_2()
939                        .px_2()
940                        .py_1()
941                        .bg(cx.theme().colors().editor_background)
942                        .border_1()
943                        .border_color(cx.theme().colors().border_variant)
944                        .rounded_sm()
945                        .child(self.render_api_key_editor(cx)),
946                )
947                .child(
948                    Label::new(
949                        format!("You can also assign the {ANTHROPIC_API_KEY_VAR} environment variable and restart Zed."),
950                    )
951                    .size(LabelSize::Small)
952                    .color(Color::Muted),
953                )
954                .into_any()
955        } else {
956            h_flex()
957                .size_full()
958                .justify_between()
959                .child(
960                    h_flex()
961                        .gap_1()
962                        .child(Icon::new(IconName::Check).color(Color::Success))
963                        .child(Label::new(if env_var_set {
964                            format!("API key set in {ANTHROPIC_API_KEY_VAR} environment variable.")
965                        } else {
966                            "API key configured.".to_string()
967                        })),
968                )
969                .child(
970                    Button::new("reset-key", "Reset key")
971                        .icon(Some(IconName::Trash))
972                        .icon_size(IconSize::Small)
973                        .icon_position(IconPosition::Start)
974                        .disabled(env_var_set)
975                        .when(env_var_set, |this| {
976                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {ANTHROPIC_API_KEY_VAR} environment variable.")))
977                        })
978                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
979                )
980                .into_any()
981        }
982    }
983}