// anthropic.rs — Anthropic language-model provider for Zed.

  1use crate::AllLanguageModelSettings;
  2use crate::ui::InstructionListItem;
  3use anthropic::{AnthropicError, AnthropicModelMode, ContentDelta, Event, ResponseContent, Usage};
  4use anyhow::{Context as _, Result, anyhow};
  5use collections::{BTreeMap, HashMap};
  6use credentials_provider::CredentialsProvider;
  7use editor::{Editor, EditorElement, EditorStyle};
  8use futures::Stream;
  9use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
 10use gpui::{
 11    AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
 12};
 13use http_client::HttpClient;
 14use language_model::{
 15    AuthenticateError, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId,
 16    LanguageModelKnownError, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
 17    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, MessageContent,
 18    RateLimiter, Role,
 19};
 20use language_model::{LanguageModelCompletionEvent, LanguageModelToolUse, StopReason};
 21use schemars::JsonSchema;
 22use serde::{Deserialize, Serialize};
 23use settings::{Settings, SettingsStore};
 24use std::pin::Pin;
 25use std::str::FromStr;
 26use std::sync::Arc;
 27use strum::IntoEnumIterator;
 28use theme::ThemeSettings;
 29use ui::{Icon, IconName, List, Tooltip, prelude::*};
 30use util::{ResultExt, maybe};
 31
/// Stable provider identifier shared with the `language_model` crate.
const PROVIDER_ID: &str = language_model::ANTHROPIC_PROVIDER_ID;
/// Human-readable provider name used in the UI and error messages.
const PROVIDER_NAME: &str = "Anthropic";
 34
/// User-configurable settings for the Anthropic provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct AnthropicSettings {
    /// Base URL of the Anthropic API endpoint.
    pub api_url: String,
    /// Extend Zed's list of Anthropic models.
    pub available_models: Vec<AvailableModel>,
    /// NOTE(review): appears to flag settings written in a legacy format that
    /// still need migration — confirm against the settings loader.
    pub needs_setting_migration: bool,
}
 42
/// A model entry users can add via settings to extend or override the
/// built-in Anthropic model list.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: usize,
    /// A model `name` to substitute when calling tools, in case the primary model doesn't support tool calling.
    pub tool_override: Option<String>,
    /// Configuration of Anthropic's caching API.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// Maximum number of tokens the model may generate in a response.
    pub max_output_tokens: Option<u32>,
    /// Sampling temperature used when a request doesn't specify one.
    pub default_temperature: Option<f32>,
    /// Additional beta header values to send with API requests.
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}
 62
/// Settings-level response mode; mirrors [`AnthropicModelMode`].
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ModelMode {
    /// Standard completion without extended thinking.
    #[default]
    Default,
    /// Extended "thinking" mode, where the model reasons before answering.
    Thinking {
        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
        budget_tokens: Option<u32>,
    },
}
 73
 74impl From<ModelMode> for AnthropicModelMode {
 75    fn from(value: ModelMode) -> Self {
 76        match value {
 77            ModelMode::Default => AnthropicModelMode::Default,
 78            ModelMode::Thinking { budget_tokens } => AnthropicModelMode::Thinking { budget_tokens },
 79        }
 80    }
 81}
 82
 83impl From<AnthropicModelMode> for ModelMode {
 84    fn from(value: AnthropicModelMode) -> Self {
 85        match value {
 86            AnthropicModelMode::Default => ModelMode::Default,
 87            AnthropicModelMode::Thinking { budget_tokens } => ModelMode::Thinking { budget_tokens },
 88        }
 89    }
 90}
 91
/// Language-model provider backed by the Anthropic API, holding a shared HTTP
/// client and observable credential state.
pub struct AnthropicLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}
 96
/// Environment variable consulted before the OS credential store.
const ANTHROPIC_API_KEY_VAR: &str = "ANTHROPIC_API_KEY";
 98
/// Credential state for the Anthropic provider.
pub struct State {
    /// The API key, if one has been loaded from the environment or credential store.
    api_key: Option<String>,
    /// True when the key came from `ANTHROPIC_API_KEY` rather than the credential store.
    api_key_from_env: bool,
    /// Keeps the global-settings observer alive for the lifetime of this state.
    _subscription: Subscription,
}
104
105impl State {
106    fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
107        let credentials_provider = <dyn CredentialsProvider>::global(cx);
108        let api_url = AllLanguageModelSettings::get_global(cx)
109            .anthropic
110            .api_url
111            .clone();
112        cx.spawn(async move |this, cx| {
113            credentials_provider
114                .delete_credentials(&api_url, &cx)
115                .await
116                .ok();
117            this.update(cx, |this, cx| {
118                this.api_key = None;
119                this.api_key_from_env = false;
120                cx.notify();
121            })
122        })
123    }
124
125    fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
126        let credentials_provider = <dyn CredentialsProvider>::global(cx);
127        let api_url = AllLanguageModelSettings::get_global(cx)
128            .anthropic
129            .api_url
130            .clone();
131        cx.spawn(async move |this, cx| {
132            credentials_provider
133                .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
134                .await
135                .ok();
136
137            this.update(cx, |this, cx| {
138                this.api_key = Some(api_key);
139                cx.notify();
140            })
141        })
142    }
143
144    fn is_authenticated(&self) -> bool {
145        self.api_key.is_some()
146    }
147
148    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
149        if self.is_authenticated() {
150            return Task::ready(Ok(()));
151        }
152
153        let credentials_provider = <dyn CredentialsProvider>::global(cx);
154        let api_url = AllLanguageModelSettings::get_global(cx)
155            .anthropic
156            .api_url
157            .clone();
158
159        cx.spawn(async move |this, cx| {
160            let (api_key, from_env) = if let Ok(api_key) = std::env::var(ANTHROPIC_API_KEY_VAR) {
161                (api_key, true)
162            } else {
163                let (_, api_key) = credentials_provider
164                    .read_credentials(&api_url, &cx)
165                    .await?
166                    .ok_or(AuthenticateError::CredentialsNotFound)?;
167                (
168                    String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?,
169                    false,
170                )
171            };
172
173            this.update(cx, |this, cx| {
174                this.api_key = Some(api_key);
175                this.api_key_from_env = from_env;
176                cx.notify();
177            })?;
178
179            Ok(())
180        })
181    }
182}
183
184impl AnthropicLanguageModelProvider {
185    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
186        let state = cx.new(|cx| State {
187            api_key: None,
188            api_key_from_env: false,
189            _subscription: cx.observe_global::<SettingsStore>(|_, cx| {
190                cx.notify();
191            }),
192        });
193
194        Self { http_client, state }
195    }
196
197    fn create_language_model(&self, model: anthropic::Model) -> Arc<dyn LanguageModel> {
198        Arc::new(AnthropicModel {
199            id: LanguageModelId::from(model.id().to_string()),
200            model,
201            state: self.state.clone(),
202            http_client: self.http_client.clone(),
203            request_limiter: RateLimiter::new(4),
204        }) as Arc<dyn LanguageModel>
205    }
206}
207
impl LanguageModelProviderState for AnthropicLanguageModelProvider {
    type ObservableEntity = State;

    /// Exposes the credential state so callers can observe authentication changes.
    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
215
216impl LanguageModelProvider for AnthropicLanguageModelProvider {
217    fn id(&self) -> LanguageModelProviderId {
218        LanguageModelProviderId(PROVIDER_ID.into())
219    }
220
221    fn name(&self) -> LanguageModelProviderName {
222        LanguageModelProviderName(PROVIDER_NAME.into())
223    }
224
225    fn icon(&self) -> IconName {
226        IconName::AiAnthropic
227    }
228
229    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
230        let model = anthropic::Model::default();
231        Some(Arc::new(AnthropicModel {
232            id: LanguageModelId::from(model.id().to_string()),
233            model,
234            state: self.state.clone(),
235            http_client: self.http_client.clone(),
236            request_limiter: RateLimiter::new(4),
237        }))
238    }
239
240    fn recommended_models(&self, _cx: &App) -> Vec<Arc<dyn LanguageModel>> {
241        [
242            anthropic::Model::Claude3_7Sonnet,
243            anthropic::Model::Claude3_7SonnetThinking,
244        ]
245        .into_iter()
246        .map(|model| self.create_language_model(model))
247        .collect()
248    }
249
250    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
251        let mut models = BTreeMap::default();
252
253        // Add base models from anthropic::Model::iter()
254        for model in anthropic::Model::iter() {
255            if !matches!(model, anthropic::Model::Custom { .. }) {
256                models.insert(model.id().to_string(), model);
257            }
258        }
259
260        // Override with available models from settings
261        for model in AllLanguageModelSettings::get_global(cx)
262            .anthropic
263            .available_models
264            .iter()
265        {
266            models.insert(
267                model.name.clone(),
268                anthropic::Model::Custom {
269                    name: model.name.clone(),
270                    display_name: model.display_name.clone(),
271                    max_tokens: model.max_tokens,
272                    tool_override: model.tool_override.clone(),
273                    cache_configuration: model.cache_configuration.as_ref().map(|config| {
274                        anthropic::AnthropicModelCacheConfiguration {
275                            max_cache_anchors: config.max_cache_anchors,
276                            should_speculate: config.should_speculate,
277                            min_total_token: config.min_total_token,
278                        }
279                    }),
280                    max_output_tokens: model.max_output_tokens,
281                    default_temperature: model.default_temperature,
282                    extra_beta_headers: model.extra_beta_headers.clone(),
283                    mode: model.mode.clone().unwrap_or_default().into(),
284                },
285            );
286        }
287
288        models
289            .into_values()
290            .map(|model| self.create_language_model(model))
291            .collect()
292    }
293
294    fn is_authenticated(&self, cx: &App) -> bool {
295        self.state.read(cx).is_authenticated()
296    }
297
298    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
299        self.state.update(cx, |state, cx| state.authenticate(cx))
300    }
301
302    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
303        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
304            .into()
305    }
306
307    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
308        self.state.update(cx, |state, cx| state.reset_api_key(cx))
309    }
310}
311
/// A single Anthropic model exposed through the [`LanguageModel`] trait.
pub struct AnthropicModel {
    /// Identifier for this model within Zed.
    id: LanguageModelId,
    /// The underlying Anthropic model descriptor.
    model: anthropic::Model,
    /// Shared credential state (API key / endpoint settings).
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    /// Caps the number of concurrent in-flight requests.
    request_limiter: RateLimiter,
}
319
320pub fn count_anthropic_tokens(
321    request: LanguageModelRequest,
322    cx: &App,
323) -> BoxFuture<'static, Result<usize>> {
324    cx.background_spawn(async move {
325        let messages = request.messages;
326        let mut tokens_from_images = 0;
327        let mut string_messages = Vec::with_capacity(messages.len());
328
329        for message in messages {
330            use language_model::MessageContent;
331
332            let mut string_contents = String::new();
333
334            for content in message.content {
335                match content {
336                    MessageContent::Text(text) => {
337                        string_contents.push_str(&text);
338                    }
339                    MessageContent::Image(image) => {
340                        tokens_from_images += image.estimate_tokens();
341                    }
342                    MessageContent::ToolUse(_tool_use) => {
343                        // TODO: Estimate token usage from tool uses.
344                    }
345                    MessageContent::ToolResult(tool_result) => {
346                        string_contents.push_str(&tool_result.content);
347                    }
348                }
349            }
350
351            if !string_contents.is_empty() {
352                string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
353                    role: match message.role {
354                        Role::User => "user".into(),
355                        Role::Assistant => "assistant".into(),
356                        Role::System => "system".into(),
357                    },
358                    content: Some(string_contents),
359                    name: None,
360                    function_call: None,
361                });
362            }
363        }
364
365        // Tiktoken doesn't yet support these models, so we manually use the
366        // same tokenizer as GPT-4.
367        tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
368            .map(|tokens| tokens + tokens_from_images)
369    })
370    .boxed()
371}
372
373impl AnthropicModel {
374    fn stream_completion(
375        &self,
376        request: anthropic::Request,
377        cx: &AsyncApp,
378    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<anthropic::Event, AnthropicError>>>>
379    {
380        let http_client = self.http_client.clone();
381
382        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| {
383            let settings = &AllLanguageModelSettings::get_global(cx).anthropic;
384            (state.api_key.clone(), settings.api_url.clone())
385        }) else {
386            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
387        };
388
389        async move {
390            let api_key = api_key.ok_or_else(|| anyhow!("Missing Anthropic API Key"))?;
391            let request =
392                anthropic::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
393            request.await.context("failed to stream completion")
394        }
395        .boxed()
396    }
397}
398
impl LanguageModel for AnthropicModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    /// All models served by this provider support tool use.
    fn supports_tools(&self) -> bool {
        true
    }

    fn telemetry_id(&self) -> String {
        format!("anthropic/{}", self.model.id())
    }

    fn api_key(&self, cx: &App) -> Option<String> {
        self.state.read(cx).api_key.clone()
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u32> {
        Some(self.model.max_output_tokens())
    }

    /// Token counts are approximated with the GPT-4 tokenizer; see
    /// [`count_anthropic_tokens`].
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<usize>> {
        count_anthropic_tokens(request, cx)
    }

    /// Translates the request to Anthropic's wire format, sends it through the
    /// rate limiter, and maps the response events back into
    /// provider-agnostic completion events.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<LanguageModelCompletionEvent>>>> {
        let request = into_anthropic(
            request,
            self.model.request_id().into(),
            self.model.default_temperature(),
            self.model.max_output_tokens(),
            self.model.mode(),
        );
        let request = self.stream_completion(request, cx);
        let future = self.request_limiter.stream(async move {
            let response = request
                .await
                // Surface typed Anthropic errors (e.g. context-window
                // overflow) as known errors; wrap everything else as-is.
                .map_err(|err| match err.downcast::<AnthropicError>() {
                    Ok(anthropic_err) => anthropic_err_to_anyhow(anthropic_err),
                    Err(err) => anyhow!(err),
                })?;
            Ok(map_to_language_model_completion_events(response))
        });
        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Prompt-caching parameters for this model, if it supports caching.
    fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
        self.model
            .cache_configuration()
            .map(|config| LanguageModelCacheConfiguration {
                max_cache_anchors: config.max_cache_anchors,
                should_speculate: config.should_speculate,
                min_total_token: config.min_total_token,
            })
    }
}
479
/// Converts a provider-agnostic [`LanguageModelRequest`] into an
/// [`anthropic::Request`].
///
/// System messages are concatenated into the request's `system` string
/// (separated by blank lines). User/assistant content is translated into
/// Anthropic content blocks, and consecutive messages with the same role are
/// merged into a single message.
pub fn into_anthropic(
    request: LanguageModelRequest,
    model: String,
    default_temperature: f32,
    max_output_tokens: u32,
    mode: AnthropicModelMode,
) -> anthropic::Request {
    let mut new_messages: Vec<anthropic::Message> = Vec::new();
    let mut system_message = String::new();

    for message in request.messages {
        // Skip messages with no content at all.
        if message.contents_empty() {
            continue;
        }

        match message.role {
            Role::User | Role::Assistant => {
                // Mark this message as an ephemeral cache anchor when requested.
                let cache_control = if message.cache {
                    Some(anthropic::CacheControl {
                        cache_type: anthropic::CacheControlType::Ephemeral,
                    })
                } else {
                    None
                };
                let anthropic_message_content: Vec<anthropic::RequestContent> = message
                    .content
                    .into_iter()
                    .filter_map(|content| match content {
                        MessageContent::Text(text) => {
                            // Empty text chunks are dropped entirely.
                            if !text.is_empty() {
                                Some(anthropic::RequestContent::Text {
                                    text,
                                    cache_control,
                                })
                            } else {
                                None
                            }
                        }
                        MessageContent::Image(image) => Some(anthropic::RequestContent::Image {
                            source: anthropic::ImageSource {
                                source_type: "base64".to_string(),
                                media_type: "image/png".to_string(),
                                data: image.source.to_string(),
                            },
                            cache_control,
                        }),
                        MessageContent::ToolUse(tool_use) => {
                            Some(anthropic::RequestContent::ToolUse {
                                id: tool_use.id.to_string(),
                                name: tool_use.name.to_string(),
                                input: tool_use.input,
                                cache_control,
                            })
                        }
                        MessageContent::ToolResult(tool_result) => {
                            Some(anthropic::RequestContent::ToolResult {
                                tool_use_id: tool_result.tool_use_id.to_string(),
                                is_error: tool_result.is_error,
                                content: tool_result.content.to_string(),
                                cache_control,
                            })
                        }
                    })
                    .collect();
                let anthropic_role = match message.role {
                    Role::User => anthropic::Role::User,
                    Role::Assistant => anthropic::Role::Assistant,
                    Role::System => unreachable!("System role should never occur here"),
                };
                // Merge into the previous message when the role repeats.
                if let Some(last_message) = new_messages.last_mut() {
                    if last_message.role == anthropic_role {
                        last_message.content.extend(anthropic_message_content);
                        continue;
                    }
                }
                new_messages.push(anthropic::Message {
                    role: anthropic_role,
                    content: anthropic_message_content,
                });
            }
            Role::System => {
                // Accumulate all system messages, blank-line separated.
                if !system_message.is_empty() {
                    system_message.push_str("\n\n");
                }
                system_message.push_str(&message.string_contents());
            }
        }
    }

    anthropic::Request {
        model,
        messages: new_messages,
        max_tokens: max_output_tokens,
        system: if system_message.is_empty() {
            None
        } else {
            Some(anthropic::StringOrContents::String(system_message))
        },
        // Enable extended thinking only when the model mode asks for it.
        thinking: if let AnthropicModelMode::Thinking { budget_tokens } = mode {
            Some(anthropic::Thinking::Enabled { budget_tokens })
        } else {
            None
        },
        tools: request
            .tools
            .into_iter()
            .map(|tool| anthropic::Tool {
                name: tool.name,
                description: tool.description,
                input_schema: tool.input_schema,
            })
            .collect(),
        tool_choice: None,
        metadata: None,
        stop_sequences: Vec::new(),
        // Fall back to the model's default temperature when the request
        // doesn't specify one.
        temperature: request.temperature.or(Some(default_temperature)),
        top_k: None,
        top_p: None,
    }
}
600
/// Adapts the raw Anthropic SSE [`Event`] stream into Zed's
/// [`LanguageModelCompletionEvent`] stream.
///
/// Tool-use inputs arrive as incremental JSON fragments keyed by content-block
/// index; they are buffered and parsed once their block stops. Token usage and
/// the stop reason are accumulated across events.
pub fn map_to_language_model_completion_events(
    events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent>> {
    // A tool call whose JSON input is still streaming in.
    struct RawToolUse {
        id: String,
        name: String,
        input_json: String,
    }

    // State threaded through each turn of the `unfold` loop below.
    struct State {
        events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
        tool_uses_by_index: HashMap<usize, RawToolUse>,
        usage: Usage,
        stop_reason: StopReason,
    }

    futures::stream::unfold(
        State {
            events,
            tool_uses_by_index: HashMap::default(),
            usage: Usage::default(),
            stop_reason: StopReason::EndTurn,
        },
        |mut state| async move {
            // Consume raw events until one or more completion events can be
            // emitted (some raw events only mutate the accumulated state).
            while let Some(event) = state.events.next().await {
                match event {
                    Ok(event) => match event {
                        Event::ContentBlockStart {
                            index,
                            content_block,
                        } => match content_block {
                            ResponseContent::Text { text } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Text(text))],
                                    state,
                                ));
                            }
                            ResponseContent::Thinking { thinking } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking(thinking))],
                                    state,
                                ));
                            }
                            ResponseContent::RedactedThinking { .. } => {
                                // Redacted thinking is encrypted and not accessible to the user, see:
                                // https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#suggestions-for-handling-redacted-thinking-in-production
                            }
                            ResponseContent::ToolUse { id, name, .. } => {
                                // Start buffering this tool call's streamed JSON input.
                                state.tool_uses_by_index.insert(
                                    index,
                                    RawToolUse {
                                        id,
                                        name,
                                        input_json: String::new(),
                                    },
                                );
                            }
                        },
                        Event::ContentBlockDelta { index, delta } => match delta {
                            ContentDelta::TextDelta { text } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Text(text))],
                                    state,
                                ));
                            }
                            ContentDelta::ThinkingDelta { thinking } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking(thinking))],
                                    state,
                                ));
                            }
                            ContentDelta::SignatureDelta { .. } => {}
                            ContentDelta::InputJsonDelta { partial_json } => {
                                // Append this fragment to the buffered tool input.
                                if let Some(tool_use) = state.tool_uses_by_index.get_mut(&index) {
                                    tool_use.input_json.push_str(&partial_json);
                                }
                            }
                        },
                        Event::ContentBlockStop { index } => {
                            // A stopped block only yields an event if it was a
                            // tool use; parse the accumulated JSON input now.
                            if let Some(tool_use) = state.tool_uses_by_index.remove(&index) {
                                return Some((
                                    vec![maybe!({
                                        Ok(LanguageModelCompletionEvent::ToolUse(
                                            LanguageModelToolUse {
                                                id: tool_use.id.into(),
                                                name: tool_use.name.into(),
                                                // No input fragments means "no arguments".
                                                input: if tool_use.input_json.is_empty() {
                                                    serde_json::Value::Object(
                                                        serde_json::Map::default(),
                                                    )
                                                } else {
                                                    serde_json::Value::from_str(
                                                        &tool_use.input_json,
                                                    )
                                                    .map_err(|err| anyhow!(err))?
                                                },
                                            },
                                        ))
                                    })],
                                    state,
                                ));
                            }
                        }
                        Event::MessageStart { message } => {
                            update_usage(&mut state.usage, &message.usage);
                            return Some((
                                vec![
                                    Ok(LanguageModelCompletionEvent::UsageUpdate(convert_usage(
                                        &state.usage,
                                    ))),
                                    Ok(LanguageModelCompletionEvent::StartMessage {
                                        message_id: message.id,
                                    }),
                                ],
                                state,
                            ));
                        }
                        Event::MessageDelta { delta, usage } => {
                            update_usage(&mut state.usage, &usage);
                            // Remember the stop reason; it is emitted on MessageStop.
                            if let Some(stop_reason) = delta.stop_reason.as_deref() {
                                state.stop_reason = match stop_reason {
                                    "end_turn" => StopReason::EndTurn,
                                    "max_tokens" => StopReason::MaxTokens,
                                    "tool_use" => StopReason::ToolUse,
                                    _ => {
                                        log::error!(
                                            "Unexpected anthropic stop_reason: {stop_reason}"
                                        );
                                        StopReason::EndTurn
                                    }
                                };
                            }
                            return Some((
                                vec![Ok(LanguageModelCompletionEvent::UsageUpdate(
                                    convert_usage(&state.usage),
                                ))],
                                state,
                            ));
                        }
                        Event::MessageStop => {
                            return Some((
                                vec![Ok(LanguageModelCompletionEvent::Stop(state.stop_reason))],
                                state,
                            ));
                        }
                        Event::Error { error } => {
                            return Some((
                                vec![Err(anyhow!(AnthropicError::ApiError(error)))],
                                state,
                            ));
                        }
                        // All other event types are ignored.
                        _ => {}
                    },
                    Err(err) => {
                        return Some((vec![Err(anthropic_err_to_anyhow(err))], state));
                    }
                }
            }

            // Upstream stream exhausted.
            None
        },
    )
    // Each turn may produce multiple events; flatten them into the output.
    .flat_map(futures::stream::iter)
}
765
766pub fn anthropic_err_to_anyhow(err: AnthropicError) -> anyhow::Error {
767    if let AnthropicError::ApiError(api_err) = &err {
768        if let Some(tokens) = api_err.match_window_exceeded() {
769            return anyhow!(LanguageModelKnownError::ContextWindowLimitExceeded { tokens });
770        }
771    }
772
773    anyhow!(err)
774}
775
776/// Updates usage data by preferring counts from `new`.
777fn update_usage(usage: &mut Usage, new: &Usage) {
778    if let Some(input_tokens) = new.input_tokens {
779        usage.input_tokens = Some(input_tokens);
780    }
781    if let Some(output_tokens) = new.output_tokens {
782        usage.output_tokens = Some(output_tokens);
783    }
784    if let Some(cache_creation_input_tokens) = new.cache_creation_input_tokens {
785        usage.cache_creation_input_tokens = Some(cache_creation_input_tokens);
786    }
787    if let Some(cache_read_input_tokens) = new.cache_read_input_tokens {
788        usage.cache_read_input_tokens = Some(cache_read_input_tokens);
789    }
790}
791
792fn convert_usage(usage: &Usage) -> language_model::TokenUsage {
793    language_model::TokenUsage {
794        input_tokens: usage.input_tokens.unwrap_or(0),
795        output_tokens: usage.output_tokens.unwrap_or(0),
796        cache_creation_input_tokens: usage.cache_creation_input_tokens.unwrap_or(0),
797        cache_read_input_tokens: usage.cache_read_input_tokens.unwrap_or(0),
798    }
799}
800
/// UI state for the Anthropic provider's configuration panel: an API-key
/// input field plus a handle to the shared provider [`State`].
struct ConfigurationView {
    // Single-line editor the user types/pastes the API key into.
    api_key_editor: Entity<Editor>,
    // Shared provider state holding credentials and authentication status.
    state: gpui::Entity<State>,
    // `Some` while stored credentials are being loaded; cleared when the
    // load finishes so the view stops showing its loading placeholder.
    load_credentials_task: Option<Task<()>>,
}
806
807impl ConfigurationView {
808    const PLACEHOLDER_TEXT: &'static str = "sk-ant-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
809
810    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
811        cx.observe(&state, |_, _, cx| {
812            cx.notify();
813        })
814        .detach();
815
816        let load_credentials_task = Some(cx.spawn({
817            let state = state.clone();
818            async move |this, cx| {
819                if let Some(task) = state
820                    .update(cx, |state, cx| state.authenticate(cx))
821                    .log_err()
822                {
823                    // We don't log an error, because "not signed in" is also an error.
824                    let _ = task.await;
825                }
826                this.update(cx, |this, cx| {
827                    this.load_credentials_task = None;
828                    cx.notify();
829                })
830                .log_err();
831            }
832        }));
833
834        Self {
835            api_key_editor: cx.new(|cx| {
836                let mut editor = Editor::single_line(window, cx);
837                editor.set_placeholder_text(Self::PLACEHOLDER_TEXT, cx);
838                editor
839            }),
840            state,
841            load_credentials_task,
842        }
843    }
844
845    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
846        let api_key = self.api_key_editor.read(cx).text(cx);
847        if api_key.is_empty() {
848            return;
849        }
850
851        let state = self.state.clone();
852        cx.spawn_in(window, async move |_, cx| {
853            state
854                .update(cx, |state, cx| state.set_api_key(api_key, cx))?
855                .await
856        })
857        .detach_and_log_err(cx);
858
859        cx.notify();
860    }
861
862    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
863        self.api_key_editor
864            .update(cx, |editor, cx| editor.set_text("", window, cx));
865
866        let state = self.state.clone();
867        cx.spawn_in(window, async move |_, cx| {
868            state.update(cx, |state, cx| state.reset_api_key(cx))?.await
869        })
870        .detach_and_log_err(cx);
871
872        cx.notify();
873    }
874
875    fn render_api_key_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
876        let settings = ThemeSettings::get_global(cx);
877        let text_style = TextStyle {
878            color: cx.theme().colors().text,
879            font_family: settings.ui_font.family.clone(),
880            font_features: settings.ui_font.features.clone(),
881            font_fallbacks: settings.ui_font.fallbacks.clone(),
882            font_size: rems(0.875).into(),
883            font_weight: settings.ui_font.weight,
884            font_style: FontStyle::Normal,
885            line_height: relative(1.3),
886            white_space: WhiteSpace::Normal,
887            ..Default::default()
888        };
889        EditorElement::new(
890            &self.api_key_editor,
891            EditorStyle {
892                background: cx.theme().colors().editor_background,
893                local_player: cx.theme().players().local(),
894                text: text_style,
895                ..Default::default()
896            },
897        )
898    }
899
900    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
901        !self.state.read(cx).is_authenticated()
902    }
903}
904
impl Render for ConfigurationView {
    // Renders one of three states: a loading placeholder while credentials
    // are being fetched, the key-entry form when unauthenticated, or a
    // confirmation row (with a reset button) once a key is configured.
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        // Keys supplied via the environment variable can't be reset from
        // the UI, so this flag disables the reset button below.
        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            // Credential loading kicked off in `ConfigurationView::new` is
            // still in flight.
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            // Not authenticated: show setup instructions plus the key editor.
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's assistant with Anthropic, you need to add an API key. Follow these steps:"))
                .child(
                    List::new()
                        .child(
                            InstructionListItem::new(
                                "Create one by visiting",
                                Some("Anthropic's settings"),
                                Some("https://console.anthropic.com/settings/keys")
                            )
                        )
                        .child(
                            InstructionListItem::text_only("Paste your API key below and hit enter to start using the assistant")
                        )
                )
                .child(
                    // Bordered container framing the single-line key editor.
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .border_1()
                        .border_color(cx.theme().colors().border_variant)
                        .rounded_sm()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {ANTHROPIC_API_KEY_VAR} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any()
        } else {
            // Authenticated: confirmation row with an optional reset action.
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {ANTHROPIC_API_KEY_VAR} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    // Reset is disabled (with an explanatory tooltip) when the
                    // key comes from the environment rather than user entry.
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {ANTHROPIC_API_KEY_VAR} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        }
    }
}
977}