1use crate::AllLanguageModelSettings;
2use crate::ui::InstructionListItem;
3use anthropic::{AnthropicError, AnthropicModelMode, ContentDelta, Event, ResponseContent, Usage};
4use anyhow::{Context as _, Result, anyhow};
5use collections::{BTreeMap, HashMap};
6use credentials_provider::CredentialsProvider;
7use editor::{Editor, EditorElement, EditorStyle};
8use futures::Stream;
9use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
10use gpui::{
11 AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
12};
13use http_client::HttpClient;
14use language_model::{
15 AuthenticateError, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId,
16 LanguageModelKnownError, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
17 LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, MessageContent,
18 RateLimiter, Role,
19};
20use language_model::{LanguageModelCompletionEvent, LanguageModelToolUse, StopReason};
21use schemars::JsonSchema;
22use serde::{Deserialize, Serialize};
23use settings::{Settings, SettingsStore};
24use std::pin::Pin;
25use std::str::FromStr;
26use std::sync::Arc;
27use strum::IntoEnumIterator;
28use theme::ThemeSettings;
29use ui::{Icon, IconName, List, Tooltip, prelude::*};
30use util::{ResultExt, maybe};
31
// Stable identifier and display name for this provider; the id is shared with
// the `language_model` crate so provider lookups agree across the app.
const PROVIDER_ID: &str = language_model::ANTHROPIC_PROVIDER_ID;
const PROVIDER_NAME: &str = "Anthropic";
34
/// User-configurable settings for the Anthropic provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct AnthropicSettings {
    /// Base URL of the Anthropic API.
    pub api_url: String,
    /// Extend Zed's list of Anthropic models.
    pub available_models: Vec<AvailableModel>,
    /// Whether settings written under an older schema still need migration.
    pub needs_setting_migration: bool,
}
42
/// A user-declared model entry that extends (or overrides) the built-in model list.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: usize,
    /// A model `name` to substitute when calling tools, in case the primary model doesn't support tool calling.
    pub tool_override: Option<String>,
    /// Configuration of Anthropic's caching API.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// Maximum number of output tokens for this model.
    pub max_output_tokens: Option<u32>,
    /// Sampling temperature used when the request does not specify one.
    pub default_temperature: Option<f32>,
    /// Extra `anthropic-beta` header values to send with requests.
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}
62
/// Operating mode of a model: plain responses or extended "thinking".
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ModelMode {
    #[default]
    Default,
    Thinking {
        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
        budget_tokens: Option<u32>,
    },
}
73
74impl From<ModelMode> for AnthropicModelMode {
75 fn from(value: ModelMode) -> Self {
76 match value {
77 ModelMode::Default => AnthropicModelMode::Default,
78 ModelMode::Thinking { budget_tokens } => AnthropicModelMode::Thinking { budget_tokens },
79 }
80 }
81}
82
83impl From<AnthropicModelMode> for ModelMode {
84 fn from(value: AnthropicModelMode) -> Self {
85 match value {
86 AnthropicModelMode::Default => ModelMode::Default,
87 AnthropicModelMode::Thinking { budget_tokens } => ModelMode::Thinking { budget_tokens },
88 }
89 }
90}
91
/// Registers Anthropic models with Zed and owns the shared credential state.
pub struct AnthropicLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}
96
/// Environment variable consulted before falling back to the credential store.
const ANTHROPIC_API_KEY_VAR: &str = "ANTHROPIC_API_KEY";
98
/// Credential state shared between the provider, its models, and the settings UI.
pub struct State {
    // The API key currently in use, if any.
    api_key: Option<String>,
    // True when the key came from `ANTHROPIC_API_KEY` rather than the credential store.
    api_key_from_env: bool,
    // Keeps the settings-store observer alive for the lifetime of this state.
    _subscription: Subscription,
}
104
105impl State {
106 fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
107 let credentials_provider = <dyn CredentialsProvider>::global(cx);
108 let api_url = AllLanguageModelSettings::get_global(cx)
109 .anthropic
110 .api_url
111 .clone();
112 cx.spawn(async move |this, cx| {
113 credentials_provider
114 .delete_credentials(&api_url, &cx)
115 .await
116 .ok();
117 this.update(cx, |this, cx| {
118 this.api_key = None;
119 this.api_key_from_env = false;
120 cx.notify();
121 })
122 })
123 }
124
125 fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
126 let credentials_provider = <dyn CredentialsProvider>::global(cx);
127 let api_url = AllLanguageModelSettings::get_global(cx)
128 .anthropic
129 .api_url
130 .clone();
131 cx.spawn(async move |this, cx| {
132 credentials_provider
133 .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
134 .await
135 .ok();
136
137 this.update(cx, |this, cx| {
138 this.api_key = Some(api_key);
139 cx.notify();
140 })
141 })
142 }
143
144 fn is_authenticated(&self) -> bool {
145 self.api_key.is_some()
146 }
147
148 fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
149 if self.is_authenticated() {
150 return Task::ready(Ok(()));
151 }
152
153 let credentials_provider = <dyn CredentialsProvider>::global(cx);
154 let api_url = AllLanguageModelSettings::get_global(cx)
155 .anthropic
156 .api_url
157 .clone();
158
159 cx.spawn(async move |this, cx| {
160 let (api_key, from_env) = if let Ok(api_key) = std::env::var(ANTHROPIC_API_KEY_VAR) {
161 (api_key, true)
162 } else {
163 let (_, api_key) = credentials_provider
164 .read_credentials(&api_url, &cx)
165 .await?
166 .ok_or(AuthenticateError::CredentialsNotFound)?;
167 (
168 String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?,
169 false,
170 )
171 };
172
173 this.update(cx, |this, cx| {
174 this.api_key = Some(api_key);
175 this.api_key_from_env = from_env;
176 cx.notify();
177 })?;
178
179 Ok(())
180 })
181 }
182}
183
184impl AnthropicLanguageModelProvider {
185 pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
186 let state = cx.new(|cx| State {
187 api_key: None,
188 api_key_from_env: false,
189 _subscription: cx.observe_global::<SettingsStore>(|_, cx| {
190 cx.notify();
191 }),
192 });
193
194 Self { http_client, state }
195 }
196
197 fn create_language_model(&self, model: anthropic::Model) -> Arc<dyn LanguageModel> {
198 Arc::new(AnthropicModel {
199 id: LanguageModelId::from(model.id().to_string()),
200 model,
201 state: self.state.clone(),
202 http_client: self.http_client.clone(),
203 request_limiter: RateLimiter::new(4),
204 }) as Arc<dyn LanguageModel>
205 }
206}
207
// Exposes the credential state so the app can observe authentication changes.
impl LanguageModelProviderState for AnthropicLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
215
216impl LanguageModelProvider for AnthropicLanguageModelProvider {
217 fn id(&self) -> LanguageModelProviderId {
218 LanguageModelProviderId(PROVIDER_ID.into())
219 }
220
221 fn name(&self) -> LanguageModelProviderName {
222 LanguageModelProviderName(PROVIDER_NAME.into())
223 }
224
225 fn icon(&self) -> IconName {
226 IconName::AiAnthropic
227 }
228
229 fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
230 let model = anthropic::Model::default();
231 Some(Arc::new(AnthropicModel {
232 id: LanguageModelId::from(model.id().to_string()),
233 model,
234 state: self.state.clone(),
235 http_client: self.http_client.clone(),
236 request_limiter: RateLimiter::new(4),
237 }))
238 }
239
240 fn recommended_models(&self, _cx: &App) -> Vec<Arc<dyn LanguageModel>> {
241 [
242 anthropic::Model::Claude3_7Sonnet,
243 anthropic::Model::Claude3_7SonnetThinking,
244 ]
245 .into_iter()
246 .map(|model| self.create_language_model(model))
247 .collect()
248 }
249
250 fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
251 let mut models = BTreeMap::default();
252
253 // Add base models from anthropic::Model::iter()
254 for model in anthropic::Model::iter() {
255 if !matches!(model, anthropic::Model::Custom { .. }) {
256 models.insert(model.id().to_string(), model);
257 }
258 }
259
260 // Override with available models from settings
261 for model in AllLanguageModelSettings::get_global(cx)
262 .anthropic
263 .available_models
264 .iter()
265 {
266 models.insert(
267 model.name.clone(),
268 anthropic::Model::Custom {
269 name: model.name.clone(),
270 display_name: model.display_name.clone(),
271 max_tokens: model.max_tokens,
272 tool_override: model.tool_override.clone(),
273 cache_configuration: model.cache_configuration.as_ref().map(|config| {
274 anthropic::AnthropicModelCacheConfiguration {
275 max_cache_anchors: config.max_cache_anchors,
276 should_speculate: config.should_speculate,
277 min_total_token: config.min_total_token,
278 }
279 }),
280 max_output_tokens: model.max_output_tokens,
281 default_temperature: model.default_temperature,
282 extra_beta_headers: model.extra_beta_headers.clone(),
283 mode: model.mode.clone().unwrap_or_default().into(),
284 },
285 );
286 }
287
288 models
289 .into_values()
290 .map(|model| self.create_language_model(model))
291 .collect()
292 }
293
294 fn is_authenticated(&self, cx: &App) -> bool {
295 self.state.read(cx).is_authenticated()
296 }
297
298 fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
299 self.state.update(cx, |state, cx| state.authenticate(cx))
300 }
301
302 fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
303 cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
304 .into()
305 }
306
307 fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
308 self.state.update(cx, |state, cx| state.reset_api_key(cx))
309 }
310}
311
/// A single Anthropic model exposed through the `LanguageModel` interface.
pub struct AnthropicModel {
    id: LanguageModelId,
    model: anthropic::Model,
    // Shared credential state (API key) owned by the provider.
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    // Caps the number of concurrent in-flight requests.
    request_limiter: RateLimiter,
}
319
320pub fn count_anthropic_tokens(
321 request: LanguageModelRequest,
322 cx: &App,
323) -> BoxFuture<'static, Result<usize>> {
324 cx.background_spawn(async move {
325 let messages = request.messages;
326 let mut tokens_from_images = 0;
327 let mut string_messages = Vec::with_capacity(messages.len());
328
329 for message in messages {
330 use language_model::MessageContent;
331
332 let mut string_contents = String::new();
333
334 for content in message.content {
335 match content {
336 MessageContent::Text(text) => {
337 string_contents.push_str(&text);
338 }
339 MessageContent::Thinking { .. } => {
340 // Thinking blocks are not included in the input token count.
341 }
342 MessageContent::RedactedThinking(_) => {
343 // Thinking blocks are not included in the input token count.
344 }
345 MessageContent::Image(image) => {
346 tokens_from_images += image.estimate_tokens();
347 }
348 MessageContent::ToolUse(_tool_use) => {
349 // TODO: Estimate token usage from tool uses.
350 }
351 MessageContent::ToolResult(tool_result) => {
352 string_contents.push_str(&tool_result.content);
353 }
354 }
355 }
356
357 if !string_contents.is_empty() {
358 string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
359 role: match message.role {
360 Role::User => "user".into(),
361 Role::Assistant => "assistant".into(),
362 Role::System => "system".into(),
363 },
364 content: Some(string_contents),
365 name: None,
366 function_call: None,
367 });
368 }
369 }
370
371 // Tiktoken doesn't yet support these models, so we manually use the
372 // same tokenizer as GPT-4.
373 tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
374 .map(|tokens| tokens + tokens_from_images)
375 })
376 .boxed()
377}
378
impl AnthropicModel {
    /// Resolves the API key and URL from app state, then streams raw
    /// completion events from the Anthropic API.
    ///
    /// Returns an immediately-failing future when the app state has been
    /// dropped; the returned future fails when no API key is configured.
    fn stream_completion(
        &self,
        request: anthropic::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<anthropic::Event, AnthropicError>>>>
    {
        let http_client = self.http_client.clone();

        // Snapshot the key and URL synchronously so the async block below
        // doesn't need access to the app.
        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| {
            let settings = &AllLanguageModelSettings::get_global(cx).anthropic;
            (state.api_key.clone(), settings.api_url.clone())
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        async move {
            let api_key = api_key.ok_or_else(|| anyhow!("Missing Anthropic API Key"))?;
            let request =
                anthropic::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            request.await.context("failed to stream completion")
        }
        .boxed()
    }
}
404
// Generic `LanguageModel` interface over a concrete Anthropic model.
impl LanguageModel for AnthropicModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    // All Anthropic models exposed here support tool calling.
    fn supports_tools(&self) -> bool {
        true
    }

    fn telemetry_id(&self) -> String {
        format!("anthropic/{}", self.model.id())
    }

    fn api_key(&self, cx: &App) -> Option<String> {
        self.state.read(cx).api_key.clone()
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u32> {
        Some(self.model.max_output_tokens())
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<usize>> {
        count_anthropic_tokens(request, cx)
    }

    /// Converts the generic request to Anthropic's wire format, dispatches it
    /// through the rate limiter, and adapts the response stream into
    /// `LanguageModelCompletionEvent`s.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<LanguageModelCompletionEvent>>>> {
        let request = into_anthropic(
            request,
            self.model.request_id().into(),
            self.model.default_temperature(),
            self.model.max_output_tokens(),
            self.model.mode(),
        );
        // Calls the inherent `AnthropicModel::stream_completion`, not this method.
        let request = self.stream_completion(request, cx);
        let future = self.request_limiter.stream(async move {
            let response = request
                .await
                // Surface Anthropic-specific errors (e.g. context-window
                // exceeded) as their known-error equivalents.
                .map_err(|err| match err.downcast::<AnthropicError>() {
                    Ok(anthropic_err) => anthropic_err_to_anyhow(anthropic_err),
                    Err(err) => anyhow!(err),
                })?;
            Ok(map_to_language_model_completion_events(response))
        });
        async move { Ok(future.await?.boxed()) }.boxed()
    }

    fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
        self.model
            .cache_configuration()
            .map(|config| LanguageModelCacheConfiguration {
                max_cache_anchors: config.max_cache_anchors,
                should_speculate: config.should_speculate,
                min_total_token: config.min_total_token,
            })
    }
}
485
/// Converts a generic `LanguageModelRequest` into Anthropic's request format.
///
/// System messages are concatenated into the top-level `system` field, empty
/// content chunks are dropped, and consecutive messages with the same role are
/// merged into a single message.
pub fn into_anthropic(
    request: LanguageModelRequest,
    model: String,
    default_temperature: f32,
    max_output_tokens: u32,
    mode: AnthropicModelMode,
) -> anthropic::Request {
    let mut new_messages: Vec<anthropic::Message> = Vec::new();
    let mut system_message = String::new();

    for message in request.messages {
        if message.contents_empty() {
            continue;
        }

        match message.role {
            Role::User | Role::Assistant => {
                // Attach ephemeral cache-control when this message is marked
                // as a cache anchor.
                let cache_control = if message.cache {
                    Some(anthropic::CacheControl {
                        cache_type: anthropic::CacheControlType::Ephemeral,
                    })
                } else {
                    None
                };
                let anthropic_message_content: Vec<anthropic::RequestContent> = message
                    .content
                    .into_iter()
                    .filter_map(|content| match content {
                        MessageContent::Text(text) => {
                            if !text.is_empty() {
                                Some(anthropic::RequestContent::Text {
                                    text,
                                    cache_control,
                                })
                            } else {
                                None
                            }
                        }
                        MessageContent::Thinking {
                            text: thinking,
                            signature,
                        } => {
                            if !thinking.is_empty() {
                                Some(anthropic::RequestContent::Thinking {
                                    thinking,
                                    signature: signature.unwrap_or_default(),
                                    cache_control,
                                })
                            } else {
                                None
                            }
                        }
                        MessageContent::RedactedThinking(data) => {
                            // The `?` drops this chunk when the bytes are not
                            // valid UTF-8.
                            if !data.is_empty() {
                                Some(anthropic::RequestContent::RedactedThinking {
                                    data: String::from_utf8(data).ok()?,
                                })
                            } else {
                                None
                            }
                        }
                        // NOTE(review): media type is hard-coded to PNG here —
                        // confirm all images reaching this path are PNG-encoded.
                        MessageContent::Image(image) => Some(anthropic::RequestContent::Image {
                            source: anthropic::ImageSource {
                                source_type: "base64".to_string(),
                                media_type: "image/png".to_string(),
                                data: image.source.to_string(),
                            },
                            cache_control,
                        }),
                        MessageContent::ToolUse(tool_use) => {
                            Some(anthropic::RequestContent::ToolUse {
                                id: tool_use.id.to_string(),
                                name: tool_use.name.to_string(),
                                input: tool_use.input,
                                cache_control,
                            })
                        }
                        MessageContent::ToolResult(tool_result) => {
                            Some(anthropic::RequestContent::ToolResult {
                                tool_use_id: tool_result.tool_use_id.to_string(),
                                is_error: tool_result.is_error,
                                content: tool_result.content.to_string(),
                                cache_control,
                            })
                        }
                    })
                    .collect();
                let anthropic_role = match message.role {
                    Role::User => anthropic::Role::User,
                    Role::Assistant => anthropic::Role::Assistant,
                    Role::System => unreachable!("System role should never occur here"),
                };
                // Merge consecutive same-role messages into a single message.
                if let Some(last_message) = new_messages.last_mut() {
                    if last_message.role == anthropic_role {
                        last_message.content.extend(anthropic_message_content);
                        continue;
                    }
                }
                new_messages.push(anthropic::Message {
                    role: anthropic_role,
                    content: anthropic_message_content,
                });
            }
            Role::System => {
                // Fold all system messages into one blank-line-separated string.
                if !system_message.is_empty() {
                    system_message.push_str("\n\n");
                }
                system_message.push_str(&message.string_contents());
            }
        }
    }

    anthropic::Request {
        model,
        messages: new_messages,
        max_tokens: max_output_tokens,
        system: if system_message.is_empty() {
            None
        } else {
            Some(anthropic::StringOrContents::String(system_message))
        },
        // Extended thinking is enabled only in thinking mode.
        thinking: if let AnthropicModelMode::Thinking { budget_tokens } = mode {
            Some(anthropic::Thinking::Enabled { budget_tokens })
        } else {
            None
        },
        tools: request
            .tools
            .into_iter()
            .map(|tool| anthropic::Tool {
                name: tool.name,
                description: tool.description,
                input_schema: tool.input_schema,
            })
            .collect(),
        tool_choice: None,
        metadata: None,
        stop_sequences: Vec::new(),
        // The request's own temperature wins over the model default.
        temperature: request.temperature.or(Some(default_temperature)),
        top_k: None,
        top_p: None,
    }
}
629
/// Adapts Anthropic's raw event stream into Zed's
/// `LanguageModelCompletionEvent` stream.
///
/// Accumulates tool-use input JSON across deltas, tracks token usage, and
/// remembers the stop reason so it can be emitted on `MessageStop`.
pub fn map_to_language_model_completion_events(
    events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent>> {
    // A tool invocation being assembled from streamed JSON fragments.
    struct RawToolUse {
        id: String,
        name: String,
        input_json: String,
    }

    // State threaded through `unfold` between stream polls.
    struct State {
        events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
        tool_uses_by_index: HashMap<usize, RawToolUse>,
        usage: Usage,
        stop_reason: StopReason,
    }

    futures::stream::unfold(
        State {
            events,
            tool_uses_by_index: HashMap::default(),
            usage: Usage::default(),
            stop_reason: StopReason::EndTurn,
        },
        |mut state| async move {
            // Loop until an event yields output; events that only mutate state
            // (e.g. tool-input JSON deltas) emit nothing themselves.
            while let Some(event) = state.events.next().await {
                match event {
                    Ok(event) => match event {
                        Event::ContentBlockStart {
                            index,
                            content_block,
                        } => match content_block {
                            ResponseContent::Text { text } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Text(text))],
                                    state,
                                ));
                            }
                            ResponseContent::Thinking { thinking } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking {
                                        text: thinking,
                                        signature: None,
                                    })],
                                    state,
                                ));
                            }
                            ResponseContent::RedactedThinking { .. } => {
                                // Redacted thinking is encrypted and not accessible to the user, see:
                                // https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#suggestions-for-handling-redacted-thinking-in-production
                            }
                            ResponseContent::ToolUse { id, name, .. } => {
                                // Begin accumulating this tool call's input JSON.
                                state.tool_uses_by_index.insert(
                                    index,
                                    RawToolUse {
                                        id,
                                        name,
                                        input_json: String::new(),
                                    },
                                );
                            }
                        },
                        Event::ContentBlockDelta { index, delta } => match delta {
                            ContentDelta::TextDelta { text } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Text(text))],
                                    state,
                                ));
                            }
                            ContentDelta::ThinkingDelta { thinking } => {
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking {
                                        text: thinking,
                                        signature: None,
                                    })],
                                    state,
                                ));
                            }
                            ContentDelta::SignatureDelta { signature } => {
                                // Signatures arrive on their own; forward them
                                // as a thinking event with empty text.
                                return Some((
                                    vec![Ok(LanguageModelCompletionEvent::Thinking {
                                        text: "".to_string(),
                                        signature: Some(signature),
                                    })],
                                    state,
                                ));
                            }
                            ContentDelta::InputJsonDelta { partial_json } => {
                                if let Some(tool_use) = state.tool_uses_by_index.get_mut(&index) {
                                    tool_use.input_json.push_str(&partial_json);
                                }
                            }
                        },
                        Event::ContentBlockStop { index } => {
                            // A finished tool-use block: parse the accumulated
                            // JSON (empty input becomes an empty object).
                            if let Some(tool_use) = state.tool_uses_by_index.remove(&index) {
                                return Some((
                                    vec![maybe!({
                                        Ok(LanguageModelCompletionEvent::ToolUse(
                                            LanguageModelToolUse {
                                                id: tool_use.id.into(),
                                                name: tool_use.name.into(),
                                                input: if tool_use.input_json.is_empty() {
                                                    serde_json::Value::Object(
                                                        serde_json::Map::default(),
                                                    )
                                                } else {
                                                    serde_json::Value::from_str(
                                                        &tool_use.input_json,
                                                    )
                                                    .map_err(|err| anyhow!(err))?
                                                },
                                            },
                                        ))
                                    })],
                                    state,
                                ));
                            }
                        }
                        Event::MessageStart { message } => {
                            update_usage(&mut state.usage, &message.usage);
                            return Some((
                                vec![
                                    Ok(LanguageModelCompletionEvent::UsageUpdate(convert_usage(
                                        &state.usage,
                                    ))),
                                    Ok(LanguageModelCompletionEvent::StartMessage {
                                        message_id: message.id,
                                    }),
                                ],
                                state,
                            ));
                        }
                        Event::MessageDelta { delta, usage } => {
                            update_usage(&mut state.usage, &usage);
                            // Record the stop reason; it is emitted later on
                            // `MessageStop`. Unknown reasons fall back to EndTurn.
                            if let Some(stop_reason) = delta.stop_reason.as_deref() {
                                state.stop_reason = match stop_reason {
                                    "end_turn" => StopReason::EndTurn,
                                    "max_tokens" => StopReason::MaxTokens,
                                    "tool_use" => StopReason::ToolUse,
                                    _ => {
                                        log::error!(
                                            "Unexpected anthropic stop_reason: {stop_reason}"
                                        );
                                        StopReason::EndTurn
                                    }
                                };
                            }
                            return Some((
                                vec![Ok(LanguageModelCompletionEvent::UsageUpdate(
                                    convert_usage(&state.usage),
                                ))],
                                state,
                            ));
                        }
                        Event::MessageStop => {
                            return Some((
                                vec![Ok(LanguageModelCompletionEvent::Stop(state.stop_reason))],
                                state,
                            ));
                        }
                        Event::Error { error } => {
                            return Some((
                                vec![Err(anyhow!(AnthropicError::ApiError(error)))],
                                state,
                            ));
                        }
                        _ => {}
                    },
                    Err(err) => {
                        return Some((vec![Err(anthropic_err_to_anyhow(err))], state));
                    }
                }
            }

            None
        },
    )
    // Each step may yield several events; flatten them into the output stream.
    .flat_map(futures::stream::iter)
}
808
809pub fn anthropic_err_to_anyhow(err: AnthropicError) -> anyhow::Error {
810 if let AnthropicError::ApiError(api_err) = &err {
811 if let Some(tokens) = api_err.match_window_exceeded() {
812 return anyhow!(LanguageModelKnownError::ContextWindowLimitExceeded { tokens });
813 }
814 }
815
816 anyhow!(err)
817}
818
819/// Updates usage data by preferring counts from `new`.
820fn update_usage(usage: &mut Usage, new: &Usage) {
821 if let Some(input_tokens) = new.input_tokens {
822 usage.input_tokens = Some(input_tokens);
823 }
824 if let Some(output_tokens) = new.output_tokens {
825 usage.output_tokens = Some(output_tokens);
826 }
827 if let Some(cache_creation_input_tokens) = new.cache_creation_input_tokens {
828 usage.cache_creation_input_tokens = Some(cache_creation_input_tokens);
829 }
830 if let Some(cache_read_input_tokens) = new.cache_read_input_tokens {
831 usage.cache_read_input_tokens = Some(cache_read_input_tokens);
832 }
833}
834
835fn convert_usage(usage: &Usage) -> language_model::TokenUsage {
836 language_model::TokenUsage {
837 input_tokens: usage.input_tokens.unwrap_or(0),
838 output_tokens: usage.output_tokens.unwrap_or(0),
839 cache_creation_input_tokens: usage.cache_creation_input_tokens.unwrap_or(0),
840 cache_read_input_tokens: usage.cache_read_input_tokens.unwrap_or(0),
841 }
842}
843
/// Settings UI for entering and resetting the Anthropic API key.
struct ConfigurationView {
    api_key_editor: Entity<Editor>,
    state: gpui::Entity<State>,
    // `Some` while credentials are still being loaded; cleared once done.
    load_credentials_task: Option<Task<()>>,
}
849
impl ConfigurationView {
    // Placeholder hinting at the expected API key format.
    const PLACEHOLDER_TEXT: &'static str = "sk-ant-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";

    /// Creates the view and kicks off credential loading in the background.
    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        // Re-render whenever the shared credential state changes.
        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor: cx.new(|cx| {
                let mut editor = Editor::single_line(window, cx);
                editor.set_placeholder_text(Self::PLACEHOLDER_TEXT, cx);
                editor
            }),
            state,
            load_credentials_task,
        }
    }

    /// Saves the key typed into the editor; no-op when the editor is empty.
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    /// Clears the editor and deletes the stored key.
    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state.update(cx, |state, cx| state.reset_api_key(cx))?.await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    /// Renders the key editor styled with the app's UI font settings.
    fn render_api_key_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            white_space: WhiteSpace::Normal,
            ..Default::default()
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    /// The editor is shown only while no API key is configured.
    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}
947
impl Render for ConfigurationView {
    // Three states: loading credentials, no key (show setup instructions and
    // the editor), or key configured (show status and a reset button).
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's assistant with Anthropic, you need to add an API key. Follow these steps:"))
                .child(
                    List::new()
                        .child(
                            InstructionListItem::new(
                                "Create one by visiting",
                                Some("Anthropic's settings"),
                                Some("https://console.anthropic.com/settings/keys")
                            )
                        )
                        .child(
                            InstructionListItem::text_only("Paste your API key below and hit enter to start using the assistant")
                        )
                )
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .border_1()
                        .border_color(cx.theme().colors().border)
                        .rounded_sm()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {ANTHROPIC_API_KEY_VAR} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any()
        } else {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {ANTHROPIC_API_KEY_VAR} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    // Keys supplied via the environment can only be changed by
                    // unsetting the variable, so the reset button is disabled.
                    Button::new("reset-key", "Reset Key")
                        .label_size(LabelSize::Small)
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {ANTHROPIC_API_KEY_VAR} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        }
    }
}