anthropic.rs

  1use crate::AllLanguageModelSettings;
  2use crate::ui::InstructionListItem;
  3use anthropic::{AnthropicError, AnthropicModelMode, ContentDelta, Event, ResponseContent, Usage};
  4use anyhow::{Context as _, Result, anyhow};
  5use collections::{BTreeMap, HashMap};
  6use credentials_provider::CredentialsProvider;
  7use editor::{Editor, EditorElement, EditorStyle};
  8use futures::Stream;
  9use futures::{FutureExt, StreamExt, TryStreamExt as _, future::BoxFuture, stream::BoxStream};
 10use gpui::{
 11    AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
 12};
 13use http_client::HttpClient;
 14use language_model::{
 15    AuthenticateError, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId,
 16    LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
 17    LanguageModelProviderState, LanguageModelRequest, MessageContent, RateLimiter, Role,
 18};
 19use language_model::{LanguageModelCompletionEvent, LanguageModelToolUse, StopReason};
 20use schemars::JsonSchema;
 21use serde::{Deserialize, Serialize};
 22use settings::{Settings, SettingsStore};
 23use std::pin::Pin;
 24use std::str::FromStr;
 25use std::sync::Arc;
 26use strum::IntoEnumIterator;
 27use theme::ThemeSettings;
 28use ui::{Icon, IconName, List, Tooltip, prelude::*};
 29use util::{ResultExt, maybe};
 30
/// Stable provider identifier shared with the language-model registry.
const PROVIDER_ID: &str = language_model::ANTHROPIC_PROVIDER_ID;
/// Human-readable provider name shown in the UI.
const PROVIDER_NAME: &str = "Anthropic";
 33
/// User-configurable settings for the Anthropic provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct AnthropicSettings {
    /// Base URL used for Anthropic API requests.
    pub api_url: String,
    /// Extend Zed's list of Anthropic models.
    pub available_models: Vec<AvailableModel>,
    // NOTE(review): presumably set when the settings were parsed from a
    // legacy location/format and must be migrated — confirm against the
    // settings loader.
    pub needs_setting_migration: bool,
}
 41
/// A user-declared model entry that extends or overrides the built-in
/// Anthropic model list (see `provided_models`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: usize,
    /// A model `name` to substitute when calling tools, in case the primary model doesn't support tool calling.
    pub tool_override: Option<String>,
    /// Configuration of Anthropic's caching API.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// Maximum number of output tokens for this model, if overridden.
    pub max_output_tokens: Option<u32>,
    /// Sampling temperature applied when a request does not specify one.
    pub default_temperature: Option<f32>,
    /// Extra beta feature headers — presumably appended to the
    /// `anthropic-beta` request header; confirm in the anthropic crate.
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}
 61
/// Completion mode for a model; mirrors `anthropic::AnthropicModelMode`
/// (see the `From` conversions below).
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ModelMode {
    #[default]
    Default,
    Thinking {
        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
        budget_tokens: Option<u32>,
    },
}
 72
 73impl From<ModelMode> for AnthropicModelMode {
 74    fn from(value: ModelMode) -> Self {
 75        match value {
 76            ModelMode::Default => AnthropicModelMode::Default,
 77            ModelMode::Thinking { budget_tokens } => AnthropicModelMode::Thinking { budget_tokens },
 78        }
 79    }
 80}
 81
 82impl From<AnthropicModelMode> for ModelMode {
 83    fn from(value: AnthropicModelMode) -> Self {
 84        match value {
 85            AnthropicModelMode::Default => ModelMode::Default,
 86            AnthropicModelMode::Thinking { budget_tokens } => ModelMode::Thinking { budget_tokens },
 87        }
 88    }
 89}
 90
/// Language-model provider backed by the Anthropic API.
pub struct AnthropicLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    /// Shared authentication state, observable by the UI.
    state: gpui::Entity<State>,
}
 95
/// Environment variable that, when set, supplies the API key and takes
/// precedence over stored credentials (see `State::authenticate`).
const ANTHROPIC_API_KEY_VAR: &str = "ANTHROPIC_API_KEY";
 97
/// Authentication state for the Anthropic provider.
pub struct State {
    /// The API key, if one has been loaded from the environment or the
    /// credentials store.
    api_key: Option<String>,
    /// Whether `api_key` came from `ANTHROPIC_API_KEY` rather than the
    /// credentials store.
    api_key_from_env: bool,
    /// Keeps the settings observer alive for the lifetime of this state.
    _subscription: Subscription,
}
103
104impl State {
105    fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
106        let credentials_provider = <dyn CredentialsProvider>::global(cx);
107        let api_url = AllLanguageModelSettings::get_global(cx)
108            .anthropic
109            .api_url
110            .clone();
111        cx.spawn(async move |this, cx| {
112            credentials_provider
113                .delete_credentials(&api_url, &cx)
114                .await
115                .ok();
116            this.update(cx, |this, cx| {
117                this.api_key = None;
118                this.api_key_from_env = false;
119                cx.notify();
120            })
121        })
122    }
123
124    fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
125        let credentials_provider = <dyn CredentialsProvider>::global(cx);
126        let api_url = AllLanguageModelSettings::get_global(cx)
127            .anthropic
128            .api_url
129            .clone();
130        cx.spawn(async move |this, cx| {
131            credentials_provider
132                .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
133                .await
134                .ok();
135
136            this.update(cx, |this, cx| {
137                this.api_key = Some(api_key);
138                cx.notify();
139            })
140        })
141    }
142
143    fn is_authenticated(&self) -> bool {
144        self.api_key.is_some()
145    }
146
147    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
148        if self.is_authenticated() {
149            return Task::ready(Ok(()));
150        }
151
152        let credentials_provider = <dyn CredentialsProvider>::global(cx);
153        let api_url = AllLanguageModelSettings::get_global(cx)
154            .anthropic
155            .api_url
156            .clone();
157
158        cx.spawn(async move |this, cx| {
159            let (api_key, from_env) = if let Ok(api_key) = std::env::var(ANTHROPIC_API_KEY_VAR) {
160                (api_key, true)
161            } else {
162                let (_, api_key) = credentials_provider
163                    .read_credentials(&api_url, &cx)
164                    .await?
165                    .ok_or(AuthenticateError::CredentialsNotFound)?;
166                (
167                    String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?,
168                    false,
169                )
170            };
171
172            this.update(cx, |this, cx| {
173                this.api_key = Some(api_key);
174                this.api_key_from_env = from_env;
175                cx.notify();
176            })?;
177
178            Ok(())
179        })
180    }
181}
182
183impl AnthropicLanguageModelProvider {
184    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
185        let state = cx.new(|cx| State {
186            api_key: None,
187            api_key_from_env: false,
188            _subscription: cx.observe_global::<SettingsStore>(|_, cx| {
189                cx.notify();
190            }),
191        });
192
193        Self { http_client, state }
194    }
195}
196
impl LanguageModelProviderState for AnthropicLanguageModelProvider {
    type ObservableEntity = State;

    /// Exposes the provider's credential state so callers can observe
    /// authentication changes.
    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
204
205impl LanguageModelProvider for AnthropicLanguageModelProvider {
206    fn id(&self) -> LanguageModelProviderId {
207        LanguageModelProviderId(PROVIDER_ID.into())
208    }
209
210    fn name(&self) -> LanguageModelProviderName {
211        LanguageModelProviderName(PROVIDER_NAME.into())
212    }
213
214    fn icon(&self) -> IconName {
215        IconName::AiAnthropic
216    }
217
218    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
219        let model = anthropic::Model::default();
220        Some(Arc::new(AnthropicModel {
221            id: LanguageModelId::from(model.id().to_string()),
222            model,
223            state: self.state.clone(),
224            http_client: self.http_client.clone(),
225            request_limiter: RateLimiter::new(4),
226        }))
227    }
228
229    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
230        let mut models = BTreeMap::default();
231
232        // Add base models from anthropic::Model::iter()
233        for model in anthropic::Model::iter() {
234            if !matches!(model, anthropic::Model::Custom { .. }) {
235                models.insert(model.id().to_string(), model);
236            }
237        }
238
239        // Override with available models from settings
240        for model in AllLanguageModelSettings::get_global(cx)
241            .anthropic
242            .available_models
243            .iter()
244        {
245            models.insert(
246                model.name.clone(),
247                anthropic::Model::Custom {
248                    name: model.name.clone(),
249                    display_name: model.display_name.clone(),
250                    max_tokens: model.max_tokens,
251                    tool_override: model.tool_override.clone(),
252                    cache_configuration: model.cache_configuration.as_ref().map(|config| {
253                        anthropic::AnthropicModelCacheConfiguration {
254                            max_cache_anchors: config.max_cache_anchors,
255                            should_speculate: config.should_speculate,
256                            min_total_token: config.min_total_token,
257                        }
258                    }),
259                    max_output_tokens: model.max_output_tokens,
260                    default_temperature: model.default_temperature,
261                    extra_beta_headers: model.extra_beta_headers.clone(),
262                    mode: model.mode.clone().unwrap_or_default().into(),
263                },
264            );
265        }
266
267        models
268            .into_values()
269            .map(|model| {
270                Arc::new(AnthropicModel {
271                    id: LanguageModelId::from(model.id().to_string()),
272                    model,
273                    state: self.state.clone(),
274                    http_client: self.http_client.clone(),
275                    request_limiter: RateLimiter::new(4),
276                }) as Arc<dyn LanguageModel>
277            })
278            .collect()
279    }
280
281    fn is_authenticated(&self, cx: &App) -> bool {
282        self.state.read(cx).is_authenticated()
283    }
284
285    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
286        self.state.update(cx, |state, cx| state.authenticate(cx))
287    }
288
289    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
290        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
291            .into()
292    }
293
294    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
295        self.state.update(cx, |state, cx| state.reset_api_key(cx))
296    }
297}
298
/// A single Anthropic model exposed through the `LanguageModel` interface.
pub struct AnthropicModel {
    id: LanguageModelId,
    model: anthropic::Model,
    /// Shared provider state holding the API key.
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    /// Rate limiter applied to all requests for this model (constructed with
    /// a limit of 4 in this file).
    request_limiter: RateLimiter,
}
306
307pub fn count_anthropic_tokens(
308    request: LanguageModelRequest,
309    cx: &App,
310) -> BoxFuture<'static, Result<usize>> {
311    cx.background_spawn(async move {
312        let messages = request.messages;
313        let mut tokens_from_images = 0;
314        let mut string_messages = Vec::with_capacity(messages.len());
315
316        for message in messages {
317            use language_model::MessageContent;
318
319            let mut string_contents = String::new();
320
321            for content in message.content {
322                match content {
323                    MessageContent::Text(text) => {
324                        string_contents.push_str(&text);
325                    }
326                    MessageContent::Image(image) => {
327                        tokens_from_images += image.estimate_tokens();
328                    }
329                    MessageContent::ToolUse(_tool_use) => {
330                        // TODO: Estimate token usage from tool uses.
331                    }
332                    MessageContent::ToolResult(tool_result) => {
333                        string_contents.push_str(&tool_result.content);
334                    }
335                }
336            }
337
338            if !string_contents.is_empty() {
339                string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
340                    role: match message.role {
341                        Role::User => "user".into(),
342                        Role::Assistant => "assistant".into(),
343                        Role::System => "system".into(),
344                    },
345                    content: Some(string_contents),
346                    name: None,
347                    function_call: None,
348                });
349            }
350        }
351
352        // Tiktoken doesn't yet support these models, so we manually use the
353        // same tokenizer as GPT-4.
354        tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
355            .map(|tokens| tokens + tokens_from_images)
356    })
357    .boxed()
358}
359
360impl AnthropicModel {
361    fn stream_completion(
362        &self,
363        request: anthropic::Request,
364        cx: &AsyncApp,
365    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<anthropic::Event, AnthropicError>>>>
366    {
367        let http_client = self.http_client.clone();
368
369        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| {
370            let settings = &AllLanguageModelSettings::get_global(cx).anthropic;
371            (state.api_key.clone(), settings.api_url.clone())
372        }) else {
373            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
374        };
375
376        async move {
377            let api_key = api_key.ok_or_else(|| anyhow!("Missing Anthropic API Key"))?;
378            let request =
379                anthropic::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
380            request.await.context("failed to stream completion")
381        }
382        .boxed()
383    }
384}
385
386impl LanguageModel for AnthropicModel {
387    fn id(&self) -> LanguageModelId {
388        self.id.clone()
389    }
390
391    fn name(&self) -> LanguageModelName {
392        LanguageModelName::from(self.model.display_name().to_string())
393    }
394
395    fn provider_id(&self) -> LanguageModelProviderId {
396        LanguageModelProviderId(PROVIDER_ID.into())
397    }
398
399    fn provider_name(&self) -> LanguageModelProviderName {
400        LanguageModelProviderName(PROVIDER_NAME.into())
401    }
402
403    fn supports_tools(&self) -> bool {
404        true
405    }
406
407    fn telemetry_id(&self) -> String {
408        format!("anthropic/{}", self.model.id())
409    }
410
411    fn api_key(&self, cx: &App) -> Option<String> {
412        self.state.read(cx).api_key.clone()
413    }
414
415    fn max_token_count(&self) -> usize {
416        self.model.max_token_count()
417    }
418
419    fn max_output_tokens(&self) -> Option<u32> {
420        Some(self.model.max_output_tokens())
421    }
422
423    fn count_tokens(
424        &self,
425        request: LanguageModelRequest,
426        cx: &App,
427    ) -> BoxFuture<'static, Result<usize>> {
428        count_anthropic_tokens(request, cx)
429    }
430
431    fn stream_completion(
432        &self,
433        request: LanguageModelRequest,
434        cx: &AsyncApp,
435    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<LanguageModelCompletionEvent>>>> {
436        let request = into_anthropic(
437            request,
438            self.model.request_id().into(),
439            self.model.default_temperature(),
440            self.model.max_output_tokens(),
441            self.model.mode(),
442        );
443        let request = self.stream_completion(request, cx);
444        let future = self.request_limiter.stream(async move {
445            let response = request.await.map_err(|err| anyhow!(err))?;
446            Ok(map_to_language_model_completion_events(response))
447        });
448        async move { Ok(future.await?.boxed()) }.boxed()
449    }
450
451    fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
452        self.model
453            .cache_configuration()
454            .map(|config| LanguageModelCacheConfiguration {
455                max_cache_anchors: config.max_cache_anchors,
456                should_speculate: config.should_speculate,
457                min_total_token: config.min_total_token,
458            })
459    }
460
461    fn use_any_tool(
462        &self,
463        request: LanguageModelRequest,
464        tool_name: String,
465        tool_description: String,
466        input_schema: serde_json::Value,
467        cx: &AsyncApp,
468    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
469        let mut request = into_anthropic(
470            request,
471            self.model.tool_model_id().into(),
472            self.model.default_temperature(),
473            self.model.max_output_tokens(),
474            self.model.mode(),
475        );
476        request.tool_choice = Some(anthropic::ToolChoice::Tool {
477            name: tool_name.clone(),
478        });
479        request.tools = vec![anthropic::Tool {
480            name: tool_name.clone(),
481            description: tool_description,
482            input_schema,
483        }];
484
485        let response = self.stream_completion(request, cx);
486        self.request_limiter
487            .run(async move {
488                let response = response.await?;
489                Ok(anthropic::extract_tool_args_from_events(
490                    tool_name,
491                    Box::pin(response.map_err(|e| anyhow!(e))),
492                )
493                .await?
494                .boxed())
495            })
496            .boxed()
497    }
498}
499
/// Converts a provider-agnostic `LanguageModelRequest` into Anthropic's
/// request format.
///
/// - System messages are concatenated (blank-line separated) into the
///   request-level `system` field rather than appearing in `messages`.
/// - Consecutive user/assistant messages with the same role are merged into
///   one Anthropic message.
/// - `mode` controls whether extended thinking is enabled.
/// - `default_temperature` is used only when the request specifies none.
pub fn into_anthropic(
    request: LanguageModelRequest,
    model: String,
    default_temperature: f32,
    max_output_tokens: u32,
    mode: AnthropicModelMode,
) -> anthropic::Request {
    let mut new_messages: Vec<anthropic::Message> = Vec::new();
    let mut system_message = String::new();

    for message in request.messages {
        // Skip messages with nothing in them.
        if message.contents_empty() {
            continue;
        }

        match message.role {
            Role::User | Role::Assistant => {
                // Attach a cache breakpoint to every content item of a
                // message flagged for caching.
                let cache_control = if message.cache {
                    Some(anthropic::CacheControl {
                        cache_type: anthropic::CacheControlType::Ephemeral,
                    })
                } else {
                    None
                };
                let anthropic_message_content: Vec<anthropic::RequestContent> = message
                    .content
                    .into_iter()
                    .filter_map(|content| match content {
                        MessageContent::Text(text) => {
                            // Drop empty text blocks.
                            if !text.is_empty() {
                                Some(anthropic::RequestContent::Text {
                                    text,
                                    cache_control,
                                })
                            } else {
                                None
                            }
                        }
                        MessageContent::Image(image) => Some(anthropic::RequestContent::Image {
                            source: anthropic::ImageSource {
                                source_type: "base64".to_string(),
                                media_type: "image/png".to_string(),
                                data: image.source.to_string(),
                            },
                            cache_control,
                        }),
                        MessageContent::ToolUse(tool_use) => {
                            Some(anthropic::RequestContent::ToolUse {
                                id: tool_use.id.to_string(),
                                name: tool_use.name.to_string(),
                                input: tool_use.input,
                                cache_control,
                            })
                        }
                        MessageContent::ToolResult(tool_result) => {
                            Some(anthropic::RequestContent::ToolResult {
                                tool_use_id: tool_result.tool_use_id.to_string(),
                                is_error: tool_result.is_error,
                                content: tool_result.content.to_string(),
                                cache_control,
                            })
                        }
                    })
                    .collect();
                let anthropic_role = match message.role {
                    Role::User => anthropic::Role::User,
                    Role::Assistant => anthropic::Role::Assistant,
                    Role::System => unreachable!("System role should never occur here"),
                };
                // Merge with the previous message when the role repeats.
                if let Some(last_message) = new_messages.last_mut() {
                    if last_message.role == anthropic_role {
                        last_message.content.extend(anthropic_message_content);
                        continue;
                    }
                }
                new_messages.push(anthropic::Message {
                    role: anthropic_role,
                    content: anthropic_message_content,
                });
            }
            Role::System => {
                // Accumulate all system messages into a single prompt.
                if !system_message.is_empty() {
                    system_message.push_str("\n\n");
                }
                system_message.push_str(&message.string_contents());
            }
        }
    }

    anthropic::Request {
        model,
        messages: new_messages,
        max_tokens: max_output_tokens,
        system: if system_message.is_empty() {
            None
        } else {
            Some(anthropic::StringOrContents::String(system_message))
        },
        thinking: if let AnthropicModelMode::Thinking { budget_tokens } = mode {
            Some(anthropic::Thinking::Enabled { budget_tokens })
        } else {
            None
        },
        tools: request
            .tools
            .into_iter()
            .map(|tool| anthropic::Tool {
                name: tool.name,
                description: tool.description,
                input_schema: tool.input_schema,
            })
            .collect(),
        tool_choice: None,
        metadata: None,
        stop_sequences: Vec::new(),
        // The request's own temperature wins over the model default.
        temperature: request.temperature.or(Some(default_temperature)),
        top_k: None,
        top_p: None,
    }
}
620
/// Adapts the raw Anthropic event stream into a stream of
/// `LanguageModelCompletionEvent`s.
///
/// Tool-use input JSON arrives incrementally, so partial fragments are
/// buffered per content-block index and emitted as a single `ToolUse` event
/// when the block stops. Usage counts and the stop reason are accumulated in
/// the unfold state across events.
pub fn map_to_language_model_completion_events(
    events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent>> {
    // A tool-use block whose input JSON is still being streamed in.
    struct RawToolUse {
        id: String,
        name: String,
        input_json: String,
    }

    // State threaded through the unfold loop.
    struct State {
        events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
        // Partially-received tool uses, keyed by content-block index.
        tool_uses_by_index: HashMap<usize, RawToolUse>,
        // Running usage totals, updated as events report new counts.
        usage: Usage,
        stop_reason: StopReason,
    }

    futures::stream::unfold(
        State {
            events,
            tool_uses_by_index: HashMap::default(),
            usage: Usage::default(),
            stop_reason: StopReason::EndTurn,
        },
        |mut state| async move {
            while let Some(event) = state.events.next().await {
                match event {
                    Ok(event) => match event {
                        Event::ContentBlockStart {
                            index,
                            content_block,
                        } => match content_block {
                            ResponseContent::Text { text } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Text(text))],
                                    state,
                                ));
                            }
                            ResponseContent::Thinking { thinking } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking(thinking))],
                                    state,
                                ));
                            }
                            ResponseContent::RedactedThinking { .. } => {
                                // Redacted thinking is encrypted and not accessible to the user, see:
                                // https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#suggestions-for-handling-redacted-thinking-in-production
                            }
                            ResponseContent::ToolUse { id, name, .. } => {
                                // Begin buffering this tool use; its input
                                // arrives via InputJsonDelta events.
                                state.tool_uses_by_index.insert(
                                    index,
                                    RawToolUse {
                                        id,
                                        name,
                                        input_json: String::new(),
                                    },
                                );
                            }
                        },
                        Event::ContentBlockDelta { index, delta } => match delta {
                            ContentDelta::TextDelta { text } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Text(text))],
                                    state,
                                ));
                            }
                            ContentDelta::ThinkingDelta { thinking } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking(thinking))],
                                    state,
                                ));
                            }
                            ContentDelta::SignatureDelta { .. } => {}
                            ContentDelta::InputJsonDelta { partial_json } => {
                                // Append to the buffered tool-use input.
                                if let Some(tool_use) = state.tool_uses_by_index.get_mut(&index) {
                                    tool_use.input_json.push_str(&partial_json);
                                }
                            }
                        },
                        Event::ContentBlockStop { index } => {
                            // A finished tool-use block: parse the buffered
                            // JSON (empty input becomes an empty object).
                            if let Some(tool_use) = state.tool_uses_by_index.remove(&index) {
                                return Some((
                                    vec![maybe!({
                                        Ok(LanguageModelCompletionEvent::ToolUse(
                                            LanguageModelToolUse {
                                                id: tool_use.id.into(),
                                                name: tool_use.name.into(),
                                                input: if tool_use.input_json.is_empty() {
                                                    serde_json::Value::Object(
                                                        serde_json::Map::default(),
                                                    )
                                                } else {
                                                    serde_json::Value::from_str(
                                                        &tool_use.input_json,
                                                    )
                                                    .map_err(|err| anyhow!(err))?
                                                },
                                            },
                                        ))
                                    })],
                                    state,
                                ));
                            }
                        }
                        Event::MessageStart { message } => {
                            update_usage(&mut state.usage, &message.usage);
                            return Some((
                                vec![
                                    Ok(LanguageModelCompletionEvent::StartMessage {
                                        message_id: message.id,
                                    }),
                                    Ok(LanguageModelCompletionEvent::UsageUpdate(convert_usage(
                                        &state.usage,
                                    ))),
                                ],
                                state,
                            ));
                        }
                        Event::MessageDelta { delta, usage } => {
                            update_usage(&mut state.usage, &usage);
                            if let Some(stop_reason) = delta.stop_reason.as_deref() {
                                state.stop_reason = match stop_reason {
                                    "end_turn" => StopReason::EndTurn,
                                    "max_tokens" => StopReason::MaxTokens,
                                    "tool_use" => StopReason::ToolUse,
                                    _ => {
                                        // Unknown reasons are logged and
                                        // treated as a normal end of turn.
                                        log::error!(
                                            "Unexpected anthropic stop_reason: {stop_reason}"
                                        );
                                        StopReason::EndTurn
                                    }
                                };
                            }
                            return Some((
                                vec![Ok(LanguageModelCompletionEvent::UsageUpdate(
                                    convert_usage(&state.usage),
                                ))],
                                state,
                            ));
                        }
                        Event::MessageStop => {
                            return Some((
                                vec![Ok(LanguageModelCompletionEvent::Stop(state.stop_reason))],
                                state,
                            ));
                        }
                        Event::Error { error } => {
                            return Some((
                                vec![Err(anyhow!(AnthropicError::ApiError(error)))],
                                state,
                            ));
                        }
                        _ => {}
                    },
                    Err(err) => {
                        return Some((vec![Err(anyhow!(err))], state));
                    }
                }
            }

            // Upstream stream ended: terminate the output stream.
            None
        },
    )
    // Each step may yield several events; flatten them into the stream.
    .flat_map(futures::stream::iter)
}
785
786/// Updates usage data by preferring counts from `new`.
787fn update_usage(usage: &mut Usage, new: &Usage) {
788    if let Some(input_tokens) = new.input_tokens {
789        usage.input_tokens = Some(input_tokens);
790    }
791    if let Some(output_tokens) = new.output_tokens {
792        usage.output_tokens = Some(output_tokens);
793    }
794    if let Some(cache_creation_input_tokens) = new.cache_creation_input_tokens {
795        usage.cache_creation_input_tokens = Some(cache_creation_input_tokens);
796    }
797    if let Some(cache_read_input_tokens) = new.cache_read_input_tokens {
798        usage.cache_read_input_tokens = Some(cache_read_input_tokens);
799    }
800}
801
802fn convert_usage(usage: &Usage) -> language_model::TokenUsage {
803    language_model::TokenUsage {
804        input_tokens: usage.input_tokens.unwrap_or(0),
805        output_tokens: usage.output_tokens.unwrap_or(0),
806        cache_creation_input_tokens: usage.cache_creation_input_tokens.unwrap_or(0),
807        cache_read_input_tokens: usage.cache_read_input_tokens.unwrap_or(0),
808    }
809}
810
811struct ConfigurationView {
812    api_key_editor: Entity<Editor>,
813    state: gpui::Entity<State>,
814    load_credentials_task: Option<Task<()>>,
815}
816
impl ConfigurationView {
    /// Placeholder shown in the empty key editor; shaped like an
    /// Anthropic key (`sk-ant-…`) without being a real one.
    const PLACEHOLDER_TEXT: &'static str = "sk-ant-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";

    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        // Re-render this view whenever the shared provider state changes.
        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        // Kick off credential loading immediately; while this task is
        // `Some`, `render` shows a "Loading credentials..." label.
        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }
                // Clear the task marker so the view leaves its loading state.
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor: cx.new(|cx| {
                let mut editor = Editor::single_line(window, cx);
                editor.set_placeholder_text(Self::PLACEHOLDER_TEXT, cx);
                editor
            }),
            state,
            load_credentials_task,
        }
    }

    /// Persists the API key currently typed into the editor. Bound to
    /// `menu::Confirm` in `render`; a no-op when the editor is empty.
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        // Store the key via the shared state in the background; errors
        // are logged rather than surfaced here.
        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    /// Clears the editor text and asks the shared state to discard the
    /// stored API key.
    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state.update(cx, |state, cx| state.reset_api_key(cx))?.await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    /// Builds the styled element for the key editor, using the UI font
    /// from the current theme settings.
    fn render_api_key_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            white_space: WhiteSpace::Normal,
            ..Default::default()
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    /// The key editor is only shown while the provider is unauthenticated.
    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}
914
impl Render for ConfigurationView {
    // Renders one of three states: loading credentials, prompting for an
    // API key, or showing the configured-key summary with a reset button.
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        // True when the key was picked up from the environment; such keys
        // can't be reset from this UI, only reported.
        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            // Credential loading (started in `ConfigurationView::new`)
            // hasn't finished yet.
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            // Unauthenticated: show setup instructions plus the key editor.
            // `menu::Confirm` (Enter) in the editor triggers `save_api_key`.
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's assistant with Anthropic, you need to add an API key. Follow these steps:"))
                .child(
                    List::new()
                        .child(
                            InstructionListItem::new(
                                "Create one by visiting",
                                Some("Anthropic's settings"),
                                Some("https://console.anthropic.com/settings/keys")
                            )
                        )
                        .child(
                            InstructionListItem::text_only("Paste your API key below and hit enter to start using the assistant")
                        )
                )
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .border_1()
                        .border_color(cx.theme().colors().border_variant)
                        .rounded_sm()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {ANTHROPIC_API_KEY_VAR} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any()
        } else {
            // Authenticated: report where the key came from and offer a
            // reset button (disabled when the key came from the env var,
            // since we can't unset the environment from here).
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {ANTHROPIC_API_KEY_VAR} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {ANTHROPIC_API_KEY_VAR} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        }
    }
}