anthropic.rs

use crate::AllLanguageModelSettings;
use crate::ui::InstructionListItem;
use anthropic::{AnthropicError, AnthropicModelMode, ContentDelta, Event, ResponseContent, Usage};
use anyhow::{Context as _, Result, anyhow};
use collections::{BTreeMap, HashMap};
use credentials_provider::CredentialsProvider;
use editor::{Editor, EditorElement, EditorStyle};
use futures::Stream;
use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
use gpui::{
    AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
};
use http_client::HttpClient;
use language_model::{
    AuthenticateError, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId,
    LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, MessageContent, RateLimiter, Role,
};
use language_model::{LanguageModelCompletionEvent, LanguageModelToolUse, StopReason};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::pin::Pin;
use std::str::FromStr;
use std::sync::Arc;
use strum::IntoEnumIterator;
use theme::ThemeSettings;
use ui::{Icon, IconName, List, Tooltip, prelude::*};
use util::{ResultExt, maybe};

const PROVIDER_ID: &str = language_model::ANTHROPIC_PROVIDER_ID;
const PROVIDER_NAME: &str = "Anthropic";

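/// Settings for the Anthropic language model provider.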
#[derive(Default, Clone, Debug, PartialEq)]
pub struct AnthropicSettings {
    pub api_url: String,
    /// Extend Zed's list of Anthropic models.
    pub available_models: Vec<AvailableModel>,
    pub needs_setting_migration: bool,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// The model's name in the Anthropic API, e.g. `claude-3-5-sonnet-latest` or `claude-3-opus-20240229`.
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: usize,
    /// A model `name` to substitute when calling tools, in case the primary model doesn't support tool calling.
    pub tool_override: Option<String>,
    /// Configuration of Anthropic's caching API.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    pub max_output_tokens: Option<u32>,
    pub default_temperature: Option<f32>,
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking).
    pub mode: Option<ModelMode>,
}

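/// The model's mode, as configured in settings.
///
/// Serialized with a `"type"` tag whose value is `"default"` or `"thinking"`
/// (see the `serde` attributes below).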
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ModelMode {
    #[default]
    Default,
    Thinking {
        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
        budget_tokens: Option<u32>,
    },
}

impl From<ModelMode> for AnthropicModelMode {
    fn from(value: ModelMode) -> Self {
        match value {
            ModelMode::Default => AnthropicModelMode::Default,
            ModelMode::Thinking { budget_tokens } => AnthropicModelMode::Thinking { budget_tokens },
        }
    }
}

impl From<AnthropicModelMode> for ModelMode {
    fn from(value: AnthropicModelMode) -> Self {
        match value {
            AnthropicModelMode::Default => ModelMode::Default,
            AnthropicModelMode::Thinking { budget_tokens } => ModelMode::Thinking { budget_tokens },
        }
    }
}

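/// The Anthropic [`LanguageModelProvider`]: owns the shared HTTP client and
/// the authentication [`State`] used by every model it vends.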
pub struct AnthropicLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}

const ANTHROPIC_API_KEY_VAR: &str = "ANTHROPIC_API_KEY";

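/// Shared provider state: the API key (if any) and whether it came from the
/// `ANTHROPIC_API_KEY` environment variable rather than the credentials store.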
pub struct State {
    api_key: Option<String>,
    api_key_from_env: bool,
    _subscription: Subscription,
}

impl State {
    fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = AllLanguageModelSettings::get_global(cx)
            .anthropic
            .api_url
            .clone();
        cx.spawn(async move |this, cx| {
            credentials_provider
                .delete_credentials(&api_url, &cx)
                .await
                .ok();
            this.update(cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

    fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = AllLanguageModelSettings::get_global(cx)
            .anthropic
            .api_url
            .clone();
        cx.spawn(async move |this, cx| {
            credentials_provider
                .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
                .await
                .ok();

            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        if self.is_authenticated() {
            return Task::ready(Ok(()));
        }

        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = AllLanguageModelSettings::get_global(cx)
            .anthropic
            .api_url
            .clone();

        cx.spawn(async move |this, cx| {
            let (api_key, from_env) = if let Ok(api_key) = std::env::var(ANTHROPIC_API_KEY_VAR) {
                (api_key, true)
            } else {
                let (_, api_key) = credentials_provider
                    .read_credentials(&api_url, &cx)
                    .await?
                    .ok_or(AuthenticateError::CredentialsNotFound)?;
                (
                    String::from_utf8(api_key).context(format!("invalid {PROVIDER_NAME} API key"))?,
                    false,
                )
            };

            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                this.api_key_from_env = from_env;
                cx.notify();
            })?;

            Ok(())
        })
    }
}

impl AnthropicLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        let state = cx.new(|cx| State {
            api_key: None,
            api_key_from_env: false,
            _subscription: cx.observe_global::<SettingsStore>(|_, cx| {
                cx.notify();
            }),
        });

        Self { http_client, state }
    }

    fn create_language_model(&self, model: anthropic::Model) -> Arc<dyn LanguageModel> {
        Arc::new(AnthropicModel {
            id: LanguageModelId::from(model.id().to_string()),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        }) as Arc<dyn LanguageModel>
    }
}

impl LanguageModelProviderState for AnthropicLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for AnthropicLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiAnthropic
    }

    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        let model = anthropic::Model::default();
        Some(Arc::new(AnthropicModel {
            id: LanguageModelId::from(model.id().to_string()),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        }))
    }

    fn recommended_models(&self, _cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        [
            anthropic::Model::Claude3_7Sonnet,
            anthropic::Model::Claude3_7SonnetThinking,
        ]
        .into_iter()
        .map(|model| self.create_language_model(model))
        .collect()
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        // Add base models from anthropic::Model::iter()
        for model in anthropic::Model::iter() {
            if !matches!(model, anthropic::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        // Override with available models from settings
        for model in AllLanguageModelSettings::get_global(cx)
            .anthropic
            .available_models
            .iter()
        {
            models.insert(
                model.name.clone(),
                anthropic::Model::Custom {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                    tool_override: model.tool_override.clone(),
                    cache_configuration: model.cache_configuration.as_ref().map(|config| {
                        anthropic::AnthropicModelCacheConfiguration {
                            max_cache_anchors: config.max_cache_anchors,
                            should_speculate: config.should_speculate,
                            min_total_token: config.min_total_token,
                        }
                    }),
                    max_output_tokens: model.max_output_tokens,
                    default_temperature: model.default_temperature,
                    extra_beta_headers: model.extra_beta_headers.clone(),
                    mode: model.mode.clone().unwrap_or_default().into(),
                },
            );
        }

        models
            .into_values()
            .map(|model| self.create_language_model(model))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

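/// A single Anthropic model exposed through the [`LanguageModel`] trait,
/// sharing the provider's authentication state and HTTP client, with its own
/// request rate limiter.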
pub struct AnthropicModel {
    id: LanguageModelId,
    model: anthropic::Model,
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

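/// Estimates the token count of a request for Anthropic models.
///
/// `tiktoken_rs` has no Anthropic tokenizer, so this approximates the count
/// with the GPT-4 tokenizer and adds a rough per-image estimate; tool-use
/// content is not yet counted (see the TODO below).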
pub fn count_anthropic_tokens(
    request: LanguageModelRequest,
    cx: &App,
) -> BoxFuture<'static, Result<usize>> {
    cx.background_spawn(async move {
        let messages = request.messages;
        let mut tokens_from_images = 0;
        let mut string_messages = Vec::with_capacity(messages.len());

        for message in messages {
            use language_model::MessageContent;

            let mut string_contents = String::new();

            for content in message.content {
                match content {
                    MessageContent::Text(text) => {
                        string_contents.push_str(&text);
                    }
                    MessageContent::Image(image) => {
                        tokens_from_images += image.estimate_tokens();
                    }
                    MessageContent::ToolUse(_tool_use) => {
                        // TODO: Estimate token usage from tool uses.
                    }
                    MessageContent::ToolResult(tool_result) => {
                        string_contents.push_str(&tool_result.content);
                    }
                }
            }

            if !string_contents.is_empty() {
                string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(string_contents),
                    name: None,
                    function_call: None,
                });
            }
        }

        // Tiktoken doesn't yet support these models, so we manually use the
        // same tokenizer as GPT-4.
        tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
            .map(|tokens| tokens + tokens_from_images)
    })
    .boxed()
}

impl AnthropicModel {
    fn stream_completion(
        &self,
        request: anthropic::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<anthropic::Event, AnthropicError>>>>
    {
        let http_client = self.http_client.clone();

        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| {
            let settings = &AllLanguageModelSettings::get_global(cx).anthropic;
            (state.api_key.clone(), settings.api_url.clone())
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        async move {
            let api_key = api_key.ok_or_else(|| anyhow!("Missing Anthropic API Key"))?;
            let request =
                anthropic::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            request.await.context("failed to stream completion")
        }
        .boxed()
    }
}

impl LanguageModel for AnthropicModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn supports_tools(&self) -> bool {
        true
    }

    fn telemetry_id(&self) -> String {
        format!("anthropic/{}", self.model.id())
    }

    fn api_key(&self, cx: &App) -> Option<String> {
        self.state.read(cx).api_key.clone()
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u32> {
        Some(self.model.max_output_tokens())
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<usize>> {
        count_anthropic_tokens(request, cx)
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<LanguageModelCompletionEvent>>>> {
        let request = into_anthropic(
            request,
            self.model.request_id().into(),
            self.model.default_temperature(),
            self.model.max_output_tokens(),
            self.model.mode(),
        );
        let request = self.stream_completion(request, cx);
        let future = self.request_limiter.stream(async move {
            let response = request.await.map_err(|err| anyhow!(err))?;
            Ok(map_to_language_model_completion_events(response))
        });
        async move { Ok(future.await?.boxed()) }.boxed()
    }

    fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
        self.model
            .cache_configuration()
            .map(|config| LanguageModelCacheConfiguration {
                max_cache_anchors: config.max_cache_anchors,
                should_speculate: config.should_speculate,
                min_total_token: config.min_total_token,
            })
    }
}

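/// Converts a [`LanguageModelRequest`] into an [`anthropic::Request`].
///
/// System messages are concatenated into the top-level `system` field,
/// consecutive user/assistant messages with the same role are merged into a
/// single Anthropic message, and a `thinking` configuration is emitted only
/// for [`AnthropicModelMode::Thinking`].
///
/// A minimal call sketch (not compiled as a doctest), assuming an
/// `anthropic::Model` named `model` and a `LanguageModelRequest` named
/// `request`, mirroring how `AnthropicModel::stream_completion` builds its
/// request:
///
/// ```ignore
/// let anthropic_request = into_anthropic(
///     request,
///     model.request_id().into(),
///     model.default_temperature(),
///     model.max_output_tokens(),
///     model.mode(),
/// );
/// ```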
pub fn into_anthropic(
    request: LanguageModelRequest,
    model: String,
    default_temperature: f32,
    max_output_tokens: u32,
    mode: AnthropicModelMode,
) -> anthropic::Request {
    let mut new_messages: Vec<anthropic::Message> = Vec::new();
    let mut system_message = String::new();

    for message in request.messages {
        if message.contents_empty() {
            continue;
        }

        match message.role {
            Role::User | Role::Assistant => {
                let cache_control = if message.cache {
                    Some(anthropic::CacheControl {
                        cache_type: anthropic::CacheControlType::Ephemeral,
                    })
                } else {
                    None
                };
                let anthropic_message_content: Vec<anthropic::RequestContent> = message
                    .content
                    .into_iter()
                    .filter_map(|content| match content {
                        MessageContent::Text(text) => {
                            if !text.is_empty() {
                                Some(anthropic::RequestContent::Text {
                                    text,
                                    cache_control,
                                })
                            } else {
                                None
                            }
                        }
                        MessageContent::Image(image) => Some(anthropic::RequestContent::Image {
                            source: anthropic::ImageSource {
                                source_type: "base64".to_string(),
                                media_type: "image/png".to_string(),
                                data: image.source.to_string(),
                            },
                            cache_control,
                        }),
                        MessageContent::ToolUse(tool_use) => {
                            Some(anthropic::RequestContent::ToolUse {
                                id: tool_use.id.to_string(),
                                name: tool_use.name.to_string(),
                                input: tool_use.input,
                                cache_control,
                            })
                        }
                        MessageContent::ToolResult(tool_result) => {
                            Some(anthropic::RequestContent::ToolResult {
                                tool_use_id: tool_result.tool_use_id.to_string(),
                                is_error: tool_result.is_error,
                                content: tool_result.content.to_string(),
                                cache_control,
                            })
                        }
                    })
                    .collect();
                let anthropic_role = match message.role {
                    Role::User => anthropic::Role::User,
                    Role::Assistant => anthropic::Role::Assistant,
                    Role::System => unreachable!("System role should never occur here"),
                };
                if let Some(last_message) = new_messages.last_mut() {
                    if last_message.role == anthropic_role {
                        last_message.content.extend(anthropic_message_content);
                        continue;
                    }
                }
                new_messages.push(anthropic::Message {
                    role: anthropic_role,
                    content: anthropic_message_content,
                });
            }
            Role::System => {
                if !system_message.is_empty() {
                    system_message.push_str("\n\n");
                }
                system_message.push_str(&message.string_contents());
            }
        }
    }

    anthropic::Request {
        model,
        messages: new_messages,
        max_tokens: max_output_tokens,
        system: if system_message.is_empty() {
            None
        } else {
            Some(anthropic::StringOrContents::String(system_message))
        },
        thinking: if let AnthropicModelMode::Thinking { budget_tokens } = mode {
            Some(anthropic::Thinking::Enabled { budget_tokens })
        } else {
            None
        },
        tools: request
            .tools
            .into_iter()
            .map(|tool| anthropic::Tool {
                name: tool.name,
                description: tool.description,
                input_schema: tool.input_schema,
            })
            .collect(),
        tool_choice: None,
        metadata: None,
        stop_sequences: Vec::new(),
        temperature: request.temperature.or(Some(default_temperature)),
        top_k: None,
        top_p: None,
    }
}

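/// Maps the raw Anthropic event stream into [`LanguageModelCompletionEvent`]s.
///
/// Tool-use input JSON is accumulated across `InputJsonDelta` events and only
/// emitted as a [`LanguageModelToolUse`] when the corresponding content block
/// stops; usage and the stop reason are tracked across the whole stream.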
pub fn map_to_language_model_completion_events(
    events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent>> {
    struct RawToolUse {
        id: String,
        name: String,
        input_json: String,
    }

    struct State {
        events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
        tool_uses_by_index: HashMap<usize, RawToolUse>,
        usage: Usage,
        stop_reason: StopReason,
    }

    futures::stream::unfold(
        State {
            events,
            tool_uses_by_index: HashMap::default(),
            usage: Usage::default(),
            stop_reason: StopReason::EndTurn,
        },
        |mut state| async move {
            while let Some(event) = state.events.next().await {
                match event {
                    Ok(event) => match event {
                        Event::ContentBlockStart {
                            index,
                            content_block,
                        } => match content_block {
                            ResponseContent::Text { text } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Text(text))],
                                    state,
                                ));
                            }
                            ResponseContent::Thinking { thinking } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking(thinking))],
                                    state,
                                ));
                            }
                            ResponseContent::RedactedThinking { .. } => {
                                // Redacted thinking is encrypted and not accessible to the user, see:
                                // https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#suggestions-for-handling-redacted-thinking-in-production
                            }
                            ResponseContent::ToolUse { id, name, .. } => {
                                state.tool_uses_by_index.insert(
                                    index,
                                    RawToolUse {
                                        id,
                                        name,
                                        input_json: String::new(),
                                    },
                                );
                            }
                        },
                        Event::ContentBlockDelta { index, delta } => match delta {
                            ContentDelta::TextDelta { text } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Text(text))],
                                    state,
                                ));
                            }
                            ContentDelta::ThinkingDelta { thinking } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking(thinking))],
                                    state,
                                ));
                            }
                            ContentDelta::SignatureDelta { .. } => {}
                            ContentDelta::InputJsonDelta { partial_json } => {
                                if let Some(tool_use) = state.tool_uses_by_index.get_mut(&index) {
                                    tool_use.input_json.push_str(&partial_json);
                                }
                            }
                        },
                        Event::ContentBlockStop { index } => {
                            if let Some(tool_use) = state.tool_uses_by_index.remove(&index) {
                                return Some((
                                    vec![maybe!({
                                        Ok(LanguageModelCompletionEvent::ToolUse(
                                            LanguageModelToolUse {
                                                id: tool_use.id.into(),
                                                name: tool_use.name.into(),
                                                input: if tool_use.input_json.is_empty() {
                                                    serde_json::Value::Object(
                                                        serde_json::Map::default(),
                                                    )
                                                } else {
                                                    serde_json::Value::from_str(
                                                        &tool_use.input_json,
                                                    )
                                                    .map_err(|err| anyhow!(err))?
                                                },
                                            },
                                        ))
                                    })],
                                    state,
                                ));
                            }
                        }
                        Event::MessageStart { message } => {
                            update_usage(&mut state.usage, &message.usage);
                            return Some((
                                vec![
                                    Ok(LanguageModelCompletionEvent::StartMessage {
                                        message_id: message.id,
                                    }),
                                    Ok(LanguageModelCompletionEvent::UsageUpdate(convert_usage(
                                        &state.usage,
                                    ))),
                                ],
                                state,
                            ));
                        }
                        Event::MessageDelta { delta, usage } => {
                            update_usage(&mut state.usage, &usage);
                            if let Some(stop_reason) = delta.stop_reason.as_deref() {
                                state.stop_reason = match stop_reason {
                                    "end_turn" => StopReason::EndTurn,
                                    "max_tokens" => StopReason::MaxTokens,
                                    "tool_use" => StopReason::ToolUse,
                                    _ => {
                                        log::error!(
                                            "Unexpected anthropic stop_reason: {stop_reason}"
                                        );
                                        StopReason::EndTurn
                                    }
                                };
                            }
                            return Some((
                                vec![Ok(LanguageModelCompletionEvent::UsageUpdate(
                                    convert_usage(&state.usage),
                                ))],
                                state,
                            ));
                        }
                        Event::MessageStop => {
                            return Some((
                                vec![Ok(LanguageModelCompletionEvent::Stop(state.stop_reason))],
                                state,
                            ));
                        }
                        Event::Error { error } => {
                            return Some((
                                vec![Err(anyhow!(AnthropicError::ApiError(error)))],
                                state,
                            ));
                        }
                        _ => {}
                    },
                    Err(err) => {
                        return Some((vec![Err(anyhow!(err))], state));
                    }
                }
            }

            None
        },
    )
    .flat_map(futures::stream::iter)
}

/// Updates usage data by preferring counts from `new`.
fn update_usage(usage: &mut Usage, new: &Usage) {
    if let Some(input_tokens) = new.input_tokens {
        usage.input_tokens = Some(input_tokens);
    }
    if let Some(output_tokens) = new.output_tokens {
        usage.output_tokens = Some(output_tokens);
    }
    if let Some(cache_creation_input_tokens) = new.cache_creation_input_tokens {
        usage.cache_creation_input_tokens = Some(cache_creation_input_tokens);
    }
    if let Some(cache_read_input_tokens) = new.cache_read_input_tokens {
        usage.cache_read_input_tokens = Some(cache_read_input_tokens);
    }
}

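/// Converts Anthropic usage counts into [`language_model::TokenUsage`],
/// treating missing counts as zero.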
fn convert_usage(usage: &Usage) -> language_model::TokenUsage {
    language_model::TokenUsage {
        input_tokens: usage.input_tokens.unwrap_or(0),
        output_tokens: usage.output_tokens.unwrap_or(0),
        cache_creation_input_tokens: usage.cache_creation_input_tokens.unwrap_or(0),
        cache_read_input_tokens: usage.cache_read_input_tokens.unwrap_or(0),
    }
}

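/// Configuration UI for the Anthropic provider: lets the user paste, save,
/// and reset an API key, and reports when the key comes from the environment.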
struct ConfigurationView {
    api_key_editor: Entity<Editor>,
    state: gpui::Entity<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    const PLACEHOLDER_TEXT: &'static str = "sk-ant-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";

    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We deliberately don't log this error, because "not signed in"
                    // also surfaces as an error here and isn't worth logging.
                    let _ = task.await;
                }
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor: cx.new(|cx| {
                let mut editor = Editor::single_line(window, cx);
                editor.set_placeholder_text(Self::PLACEHOLDER_TEXT, cx);
                editor
            }),
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state.update(cx, |state, cx| state.reset_api_key(cx))?.await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn render_api_key_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            white_space: WhiteSpace::Normal,
            ..Default::default()
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's assistant with Anthropic, you need to add an API key. Follow these steps:"))
                .child(
                    List::new()
                        .child(
                            InstructionListItem::new(
                                "Create one by visiting",
                                Some("Anthropic's settings"),
                                Some("https://console.anthropic.com/settings/keys")
                            )
                        )
                        .child(
                            InstructionListItem::text_only("Paste your API key below and hit enter to start using the assistant")
                        )
                )
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .border_1()
                        .border_color(cx.theme().colors().border_variant)
                        .rounded_sm()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {ANTHROPIC_API_KEY_VAR} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any()
        } else {
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {ANTHROPIC_API_KEY_VAR} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {ANTHROPIC_API_KEY_VAR} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        }
    }
}