// anthropic.rs — Anthropic language model provider.

   1use crate::api_key::ApiKeyState;
   2use crate::ui::InstructionListItem;
   3use anthropic::{
   4    ANTHROPIC_API_URL, AnthropicError, AnthropicModelMode, ContentDelta, Event, ResponseContent,
   5    ToolResultContent, ToolResultPart, Usage,
   6};
   7use anyhow::{Result, anyhow};
   8use collections::{BTreeMap, HashMap};
   9use editor::{Editor, EditorElement, EditorStyle};
  10use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture, stream::BoxStream};
  11use gpui::{AnyView, App, AsyncApp, Context, Entity, FontStyle, Task, TextStyle, WhiteSpace};
  12use http_client::HttpClient;
  13use language_model::{
  14    AuthenticateError, ConfigurationViewTargetAgent, LanguageModel,
  15    LanguageModelCacheConfiguration, LanguageModelCompletionError, LanguageModelId,
  16    LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
  17    LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice,
  18    LanguageModelToolResultContent, MessageContent, RateLimiter, Role,
  19};
  20use language_model::{LanguageModelCompletionEvent, LanguageModelToolUse, StopReason};
  21use schemars::JsonSchema;
  22use serde::{Deserialize, Serialize};
  23use settings::{Settings, SettingsStore};
  24use std::pin::Pin;
  25use std::str::FromStr;
  26use std::sync::{Arc, LazyLock};
  27use strum::IntoEnumIterator;
  28use theme::ThemeSettings;
  29use ui::{Icon, IconName, List, Tooltip, prelude::*};
  30use util::{ResultExt, truncate_and_trailoff};
  31use zed_env_vars::{EnvVar, env_var};
  32
/// Stable identifier for this provider, shared via the `language_model` crate.
const PROVIDER_ID: LanguageModelProviderId = language_model::ANTHROPIC_PROVIDER_ID;
/// Human-readable provider name, shared via the `language_model` crate.
const PROVIDER_NAME: LanguageModelProviderName = language_model::ANTHROPIC_PROVIDER_NAME;
  35
/// User-configurable settings for the Anthropic provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct AnthropicSettings {
    /// Base URL for API requests. When empty, `ANTHROPIC_API_URL` is used
    /// instead (see `AnthropicLanguageModelProvider::api_url`).
    pub api_url: String,
    /// Extend Zed's list of Anthropic models.
    pub available_models: Vec<AvailableModel>,
}
  42
/// A user-defined model entry that extends or overrides the built-in model list.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// A model `name` to substitute when calling tools, in case the primary model doesn't support tool calling.
    pub tool_override: Option<String>,
    /// Configuration of Anthropic's caching API.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// Maximum number of output tokens for this model.
    pub max_output_tokens: Option<u64>,
    /// Sampling temperature applied when a request does not specify one
    /// (see `into_anthropic`, which falls back to this value).
    pub default_temperature: Option<f32>,
    /// Additional `anthropic-beta` header values to send with requests.
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}
  62
/// Serialized form of a model's operating mode; mirrors [`AnthropicModelMode`]
/// (lossless conversions in both directions are implemented below).
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ModelMode {
    /// Standard request/response behavior.
    #[default]
    Default,
    /// Extended-thinking mode with an optional reasoning token budget.
    Thinking {
        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
        budget_tokens: Option<u32>,
    },
}
  73
  74impl From<ModelMode> for AnthropicModelMode {
  75    fn from(value: ModelMode) -> Self {
  76        match value {
  77            ModelMode::Default => AnthropicModelMode::Default,
  78            ModelMode::Thinking { budget_tokens } => AnthropicModelMode::Thinking { budget_tokens },
  79        }
  80    }
  81}
  82
  83impl From<AnthropicModelMode> for ModelMode {
  84    fn from(value: AnthropicModelMode) -> Self {
  85        match value {
  86            AnthropicModelMode::Default => ModelMode::Default,
  87            AnthropicModelMode::Thinking { budget_tokens } => ModelMode::Thinking { budget_tokens },
  88        }
  89    }
  90}
  91
/// Registers Anthropic models with Zed and owns the shared HTTP client and
/// API-key `State` that every model it creates borrows.
pub struct AnthropicLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}
  96
/// Name of the environment variable that can supply the Anthropic API key.
const API_KEY_ENV_VAR_NAME: &str = "ANTHROPIC_API_KEY";
/// Lazily-read value of [`API_KEY_ENV_VAR_NAME`].
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
  99
/// Provider state shared by all Anthropic models: tracks the API key for the
/// currently-configured API URL.
pub struct State {
    api_key_state: ApiKeyState,
}
 103
impl State {
    /// Returns true when an API key is available for the current API URL.
    fn is_authenticated(&self) -> bool {
        self.api_key_state.has_key()
    }

    /// Stores (or, with `None`, clears) the API key for the current API URL.
    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
        let api_url = AnthropicLanguageModelProvider::api_url(cx);
        self.api_key_state
            .store(api_url, api_key, |this| &mut this.api_key_state, cx)
    }

    /// Loads the API key for the current API URL if it hasn't been loaded yet,
    /// passing the `ANTHROPIC_API_KEY` env var as a possible source.
    /// NOTE(review): the precedence between the env var and any stored key is
    /// decided inside `ApiKeyState::load_if_needed` — confirm there if it matters.
    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let api_url = AnthropicLanguageModelProvider::api_url(cx);
        self.api_key_state.load_if_needed(
            api_url,
            &API_KEY_ENV_VAR,
            |this| &mut this.api_key_state,
            cx,
        )
    }
}
 125
impl AnthropicLanguageModelProvider {
    /// Creates the provider and its shared `State`, wiring the state to
    /// re-resolve its API key whenever settings change.
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        let state = cx.new(|cx| {
            // The stored key is scoped to the API URL, so any settings change
            // that alters the URL must re-resolve the key.
            cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let api_url = Self::api_url(cx);
                this.api_key_state.handle_url_change(
                    api_url,
                    &API_KEY_ENV_VAR,
                    |this| &mut this.api_key_state,
                    cx,
                );
                cx.notify();
            })
            .detach();
            State {
                api_key_state: ApiKeyState::new(Self::api_url(cx)),
            }
        });

        Self { http_client, state }
    }

    /// Wraps an `anthropic::Model` in an [`AnthropicModel`] that shares this
    /// provider's HTTP client and state; each model is limited to 4
    /// concurrent requests.
    fn create_language_model(&self, model: anthropic::Model) -> Arc<dyn LanguageModel> {
        Arc::new(AnthropicModel {
            id: LanguageModelId::from(model.id().to_string()),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        })
    }

    /// This provider's section of the global language-model settings.
    fn settings(cx: &App) -> &AnthropicSettings {
        &crate::AllLanguageModelSettings::get_global(cx).anthropic
    }

    /// The configured API URL, falling back to the default Anthropic endpoint
    /// when the setting is empty.
    fn api_url(cx: &App) -> SharedString {
        let api_url = &Self::settings(cx).api_url;
        if api_url.is_empty() {
            ANTHROPIC_API_URL.into()
        } else {
            SharedString::new(api_url.as_str())
        }
    }
}
 171
/// Exposes the provider's `State` entity so callers can observe it for
/// changes (e.g. authentication status).
impl LanguageModelProviderState for AnthropicLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
 179
impl LanguageModelProvider for AnthropicLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn icon(&self) -> IconName {
        IconName::AiAnthropic
    }

    /// The model used when the user hasn't picked one.
    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        Some(self.create_language_model(anthropic::Model::default()))
    }

    /// A faster default model for lightweight tasks.
    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        Some(self.create_language_model(anthropic::Model::default_fast()))
    }

    /// Models surfaced as recommendations: Claude Sonnet 4.5 in standard and
    /// thinking modes.
    fn recommended_models(&self, _cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        [
            anthropic::Model::ClaudeSonnet4_5,
            anthropic::Model::ClaudeSonnet4_5Thinking,
        ]
        .into_iter()
        .map(|model| self.create_language_model(model))
        .collect()
    }

    /// All models offered by this provider: the built-in list plus
    /// user-configured models from settings, which override built-ins sharing
    /// the same id/name. The `BTreeMap` deduplicates by key and yields a
    /// sorted result.
    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        // Add base models from anthropic::Model::iter()
        for model in anthropic::Model::iter() {
            if !matches!(model, anthropic::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        // Override with available models from settings
        for model in &AnthropicLanguageModelProvider::settings(cx).available_models {
            models.insert(
                model.name.clone(),
                anthropic::Model::Custom {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                    tool_override: model.tool_override.clone(),
                    cache_configuration: model.cache_configuration.as_ref().map(|config| {
                        anthropic::AnthropicModelCacheConfiguration {
                            max_cache_anchors: config.max_cache_anchors,
                            should_speculate: config.should_speculate,
                            min_total_token: config.min_total_token,
                        }
                    }),
                    max_output_tokens: model.max_output_tokens,
                    default_temperature: model.default_temperature,
                    extra_beta_headers: model.extra_beta_headers.clone(),
                    mode: model.mode.clone().unwrap_or_default().into(),
                },
            );
        }

        models
            .into_values()
            .map(|model| self.create_language_model(model))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    /// Builds the configuration UI (API key entry etc.) for this provider.
    fn configuration_view(
        &self,
        target_agent: ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), target_agent, window, cx))
            .into()
    }

    /// Clears the stored API key for the current API URL.
    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state
            .update(cx, |state, cx| state.set_api_key(None, cx))
    }
}
 274
/// A single Anthropic model exposed to Zed, sharing the provider's HTTP
/// client and API-key state.
pub struct AnthropicModel {
    id: LanguageModelId,
    model: anthropic::Model,
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    // Caps concurrent requests; constructed with a limit of 4 in
    // `AnthropicLanguageModelProvider::create_language_model`.
    request_limiter: RateLimiter,
}
 282
 283pub fn count_anthropic_tokens(
 284    request: LanguageModelRequest,
 285    cx: &App,
 286) -> BoxFuture<'static, Result<u64>> {
 287    cx.background_spawn(async move {
 288        let messages = request.messages;
 289        let mut tokens_from_images = 0;
 290        let mut string_messages = Vec::with_capacity(messages.len());
 291
 292        for message in messages {
 293            use language_model::MessageContent;
 294
 295            let mut string_contents = String::new();
 296
 297            for content in message.content {
 298                match content {
 299                    MessageContent::Text(text) => {
 300                        string_contents.push_str(&text);
 301                    }
 302                    MessageContent::Thinking { .. } => {
 303                        // Thinking blocks are not included in the input token count.
 304                    }
 305                    MessageContent::RedactedThinking(_) => {
 306                        // Thinking blocks are not included in the input token count.
 307                    }
 308                    MessageContent::Image(image) => {
 309                        tokens_from_images += image.estimate_tokens();
 310                    }
 311                    MessageContent::ToolUse(_tool_use) => {
 312                        // TODO: Estimate token usage from tool uses.
 313                    }
 314                    MessageContent::ToolResult(tool_result) => match &tool_result.content {
 315                        LanguageModelToolResultContent::Text(text) => {
 316                            string_contents.push_str(text);
 317                        }
 318                        LanguageModelToolResultContent::Image(image) => {
 319                            tokens_from_images += image.estimate_tokens();
 320                        }
 321                    },
 322                }
 323            }
 324
 325            if !string_contents.is_empty() {
 326                string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
 327                    role: match message.role {
 328                        Role::User => "user".into(),
 329                        Role::Assistant => "assistant".into(),
 330                        Role::System => "system".into(),
 331                    },
 332                    content: Some(string_contents),
 333                    name: None,
 334                    function_call: None,
 335                });
 336            }
 337        }
 338
 339        // Tiktoken doesn't yet support these models, so we manually use the
 340        // same tokenizer as GPT-4.
 341        tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
 342            .map(|tokens| (tokens + tokens_from_images) as u64)
 343    })
 344    .boxed()
 345}
 346
impl AnthropicModel {
    /// Starts a streaming completion request against the Anthropic API.
    ///
    /// The API key and URL are read from provider state up front so the
    /// returned future is `'static` and does not borrow `self`; it fails with
    /// `NoApiKey` when no key is configured.
    fn stream_completion(
        &self,
        request: anthropic::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            BoxStream<'static, Result<anthropic::Event, AnthropicError>>,
            LanguageModelCompletionError,
        >,
    > {
        let http_client = self.http_client.clone();

        // `read_with` only fails if the app state has been dropped (e.g.
        // during shutdown).
        let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
            let api_url = AnthropicLanguageModelProvider::api_url(cx);
            (state.api_key_state.key(&api_url), api_url)
        }) else {
            return future::ready(Err(anyhow!("App state dropped").into())).boxed();
        };

        let beta_headers = self.model.beta_headers();

        async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = anthropic::stream_completion(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
                beta_headers,
            );
            request.await.map_err(Into::into)
        }
        .boxed()
    }
}
 388
impl LanguageModel for AnthropicModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    /// All models exposed by this provider support tool use.
    fn supports_tools(&self) -> bool {
        true
    }

    /// All models exposed by this provider accept image input.
    fn supports_images(&self) -> bool {
        true
    }

    /// Every tool-choice mode (auto/any/none) is supported; the exhaustive
    /// match keeps this in sync if new variants are added upstream.
    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto
            | LanguageModelToolChoice::Any
            | LanguageModelToolChoice::None => true,
        }
    }

    fn telemetry_id(&self) -> String {
        format!("anthropic/{}", self.model.id())
    }

    /// The API key for the currently-configured API URL, if any.
    fn api_key(&self, cx: &App) -> Option<String> {
        self.state.read_with(cx, |state, cx| {
            let api_url = AnthropicLanguageModelProvider::api_url(cx);
            state.api_key_state.key(&api_url).map(|key| key.to_string())
        })
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u64> {
        Some(self.model.max_output_tokens())
    }

    /// Token counts are estimated locally; see [`count_anthropic_tokens`].
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        count_anthropic_tokens(request, cx)
    }

    /// Converts the generic request into an Anthropic request, sends it
    /// through the rate limiter, and maps the resulting Anthropic event
    /// stream into `LanguageModelCompletionEvent`s.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
            LanguageModelCompletionError,
        >,
    > {
        let request = into_anthropic(
            request,
            self.model.request_id().into(),
            self.model.default_temperature(),
            self.model.max_output_tokens(),
            self.model.mode(),
        );
        let request = self.stream_completion(request, cx);
        let future = self.request_limiter.stream(async move {
            let response = request.await?;
            Ok(AnthropicEventMapper::new().map_stream(response))
        });
        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Prompt-caching configuration, when the underlying model defines one.
    fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
        self.model
            .cache_configuration()
            .map(|config| LanguageModelCacheConfiguration {
                max_cache_anchors: config.max_cache_anchors,
                should_speculate: config.should_speculate,
                min_total_token: config.min_total_token,
            })
    }
}
 485
/// Converts Zed's provider-agnostic [`LanguageModelRequest`] into an
/// `anthropic::Request`.
///
/// - `Role::System` messages are concatenated (blank-line separated) into the
///   request-level `system` field.
/// - Consecutive user/assistant messages with the same role are merged into a
///   single Anthropic message.
/// - A message flagged `cache` gets an ephemeral cache-control marker on its
///   last cacheable content segment.
/// - `thinking` is enabled only when the request allows it and `mode` is
///   [`AnthropicModelMode::Thinking`].
/// - `temperature` falls back to `default_temperature` when the request does
///   not specify one.
pub fn into_anthropic(
    request: LanguageModelRequest,
    model: String,
    default_temperature: f32,
    max_output_tokens: u64,
    mode: AnthropicModelMode,
) -> anthropic::Request {
    let mut new_messages: Vec<anthropic::Message> = Vec::new();
    let mut system_message = String::new();

    for message in request.messages {
        if message.contents_empty() {
            continue;
        }

        match message.role {
            Role::User | Role::Assistant => {
                // Convert each content segment; empty text/thinking/redacted
                // segments are dropped entirely.
                let mut anthropic_message_content: Vec<anthropic::RequestContent> = message
                    .content
                    .into_iter()
                    .filter_map(|content| match content {
                        MessageContent::Text(text) => {
                            // NOTE(review): trailing whitespace is stripped —
                            // presumably because the API rejects trailing
                            // whitespace on the final assistant turn; confirm
                            // against the Messages API docs.
                            let text = if text.chars().last().is_some_and(|c| c.is_whitespace()) {
                                text.trim_end().to_string()
                            } else {
                                text
                            };
                            if !text.is_empty() {
                                Some(anthropic::RequestContent::Text {
                                    text,
                                    cache_control: None,
                                })
                            } else {
                                None
                            }
                        }
                        MessageContent::Thinking {
                            text: thinking,
                            signature,
                        } => {
                            if !thinking.is_empty() {
                                Some(anthropic::RequestContent::Thinking {
                                    thinking,
                                    signature: signature.unwrap_or_default(),
                                    cache_control: None,
                                })
                            } else {
                                None
                            }
                        }
                        MessageContent::RedactedThinking(data) => {
                            if !data.is_empty() {
                                Some(anthropic::RequestContent::RedactedThinking { data })
                            } else {
                                None
                            }
                        }
                        // Images are always transmitted as base64-encoded PNG.
                        MessageContent::Image(image) => Some(anthropic::RequestContent::Image {
                            source: anthropic::ImageSource {
                                source_type: "base64".to_string(),
                                media_type: "image/png".to_string(),
                                data: image.source.to_string(),
                            },
                            cache_control: None,
                        }),
                        MessageContent::ToolUse(tool_use) => {
                            Some(anthropic::RequestContent::ToolUse {
                                id: tool_use.id.to_string(),
                                name: tool_use.name.to_string(),
                                input: tool_use.input,
                                cache_control: None,
                            })
                        }
                        MessageContent::ToolResult(tool_result) => {
                            Some(anthropic::RequestContent::ToolResult {
                                tool_use_id: tool_result.tool_use_id.to_string(),
                                is_error: tool_result.is_error,
                                content: match tool_result.content {
                                    LanguageModelToolResultContent::Text(text) => {
                                        ToolResultContent::Plain(text.to_string())
                                    }
                                    LanguageModelToolResultContent::Image(image) => {
                                        ToolResultContent::Multipart(vec![ToolResultPart::Image {
                                            source: anthropic::ImageSource {
                                                source_type: "base64".to_string(),
                                                media_type: "image/png".to_string(),
                                                data: image.source.to_string(),
                                            },
                                        }])
                                    }
                                },
                                cache_control: None,
                            })
                        }
                    })
                    .collect();
                let anthropic_role = match message.role {
                    Role::User => anthropic::Role::User,
                    Role::Assistant => anthropic::Role::Assistant,
                    Role::System => unreachable!("System role should never occur here"),
                };
                // Coalesce consecutive same-role messages into one Anthropic
                // message.
                // NOTE(review): content merged here skips the `message.cache`
                // marking below even when the flag is set — confirm this is
                // intended.
                if let Some(last_message) = new_messages.last_mut()
                    && last_message.role == anthropic_role
                {
                    last_message.content.extend(anthropic_message_content);
                    continue;
                }

                // Mark the last segment of the message as cached
                if message.cache {
                    let cache_control_value = Some(anthropic::CacheControl {
                        cache_type: anthropic::CacheControlType::Ephemeral,
                    });
                    // Walk backwards so the cache breakpoint lands on the
                    // final cacheable segment.
                    for message_content in anthropic_message_content.iter_mut().rev() {
                        match message_content {
                            anthropic::RequestContent::RedactedThinking { .. } => {
                                // Caching is not possible, fallback to next message
                            }
                            anthropic::RequestContent::Text { cache_control, .. }
                            | anthropic::RequestContent::Thinking { cache_control, .. }
                            | anthropic::RequestContent::Image { cache_control, .. }
                            | anthropic::RequestContent::ToolUse { cache_control, .. }
                            | anthropic::RequestContent::ToolResult { cache_control, .. } => {
                                *cache_control = cache_control_value;
                                break;
                            }
                        }
                    }
                }

                new_messages.push(anthropic::Message {
                    role: anthropic_role,
                    content: anthropic_message_content,
                });
            }
            Role::System => {
                // System messages are accumulated into a single string rather
                // than appearing in the message list.
                if !system_message.is_empty() {
                    system_message.push_str("\n\n");
                }
                system_message.push_str(&message.string_contents());
            }
        }
    }

    anthropic::Request {
        model,
        messages: new_messages,
        max_tokens: max_output_tokens,
        system: if system_message.is_empty() {
            None
        } else {
            Some(anthropic::StringOrContents::String(system_message))
        },
        thinking: if request.thinking_allowed
            && let AnthropicModelMode::Thinking { budget_tokens } = mode
        {
            Some(anthropic::Thinking::Enabled { budget_tokens })
        } else {
            None
        },
        tools: request
            .tools
            .into_iter()
            .map(|tool| anthropic::Tool {
                name: tool.name,
                description: tool.description,
                input_schema: tool.input_schema,
            })
            .collect(),
        tool_choice: request.tool_choice.map(|choice| match choice {
            LanguageModelToolChoice::Auto => anthropic::ToolChoice::Auto,
            LanguageModelToolChoice::Any => anthropic::ToolChoice::Any,
            LanguageModelToolChoice::None => anthropic::ToolChoice::None,
        }),
        metadata: None,
        stop_sequences: Vec::new(),
        temperature: request.temperature.or(Some(default_temperature)),
        top_k: None,
        top_p: None,
    }
}
 667
/// Translates the Anthropic streaming event sequence into Zed's
/// `LanguageModelCompletionEvent`s, accumulating partially-streamed tool-use
/// JSON and usage totals along the way.
pub struct AnthropicEventMapper {
    // In-progress tool uses keyed by their content-block index.
    tool_uses_by_index: HashMap<usize, RawToolUse>,
    usage: Usage,
    stop_reason: StopReason,
}
 673
 674impl AnthropicEventMapper {
    /// Creates a mapper with empty tool-use state, zeroed usage, and an
    /// `EndTurn` default stop reason.
    pub fn new() -> Self {
        Self {
            tool_uses_by_index: HashMap::default(),
            usage: Usage::default(),
            stop_reason: StopReason::EndTurn,
        }
    }
 682
    /// Consumes the raw event stream, yielding zero or more completion events
    /// per input event (`map_event` may fan one event out into several);
    /// transport errors pass through as single error items.
    pub fn map_stream(
        mut self,
        events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
    {
        events.flat_map(move |event| {
            futures::stream::iter(match event {
                Ok(event) => self.map_event(event),
                Err(error) => vec![Err(error.into())],
            })
        })
    }
 695
 696    pub fn map_event(
 697        &mut self,
 698        event: Event,
 699    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
 700        match event {
 701            Event::ContentBlockStart {
 702                index,
 703                content_block,
 704            } => match content_block {
 705                ResponseContent::Text { text } => {
 706                    vec![Ok(LanguageModelCompletionEvent::Text(text))]
 707                }
 708                ResponseContent::Thinking { thinking } => {
 709                    vec![Ok(LanguageModelCompletionEvent::Thinking {
 710                        text: thinking,
 711                        signature: None,
 712                    })]
 713                }
 714                ResponseContent::RedactedThinking { data } => {
 715                    vec![Ok(LanguageModelCompletionEvent::RedactedThinking { data })]
 716                }
 717                ResponseContent::ToolUse { id, name, .. } => {
 718                    self.tool_uses_by_index.insert(
 719                        index,
 720                        RawToolUse {
 721                            id,
 722                            name,
 723                            input_json: String::new(),
 724                        },
 725                    );
 726                    Vec::new()
 727                }
 728            },
 729            Event::ContentBlockDelta { index, delta } => match delta {
 730                ContentDelta::TextDelta { text } => {
 731                    vec![Ok(LanguageModelCompletionEvent::Text(text))]
 732                }
 733                ContentDelta::ThinkingDelta { thinking } => {
 734                    vec![Ok(LanguageModelCompletionEvent::Thinking {
 735                        text: thinking,
 736                        signature: None,
 737                    })]
 738                }
 739                ContentDelta::SignatureDelta { signature } => {
 740                    vec![Ok(LanguageModelCompletionEvent::Thinking {
 741                        text: "".to_string(),
 742                        signature: Some(signature),
 743                    })]
 744                }
 745                ContentDelta::InputJsonDelta { partial_json } => {
 746                    if let Some(tool_use) = self.tool_uses_by_index.get_mut(&index) {
 747                        tool_use.input_json.push_str(&partial_json);
 748
 749                        // Try to convert invalid (incomplete) JSON into
 750                        // valid JSON that serde can accept, e.g. by closing
 751                        // unclosed delimiters. This way, we can update the
 752                        // UI with whatever has been streamed back so far.
 753                        if let Ok(input) = serde_json::Value::from_str(
 754                            &partial_json_fixer::fix_json(&tool_use.input_json),
 755                        ) {
 756                            return vec![Ok(LanguageModelCompletionEvent::ToolUse(
 757                                LanguageModelToolUse {
 758                                    id: tool_use.id.clone().into(),
 759                                    name: tool_use.name.clone().into(),
 760                                    is_input_complete: false,
 761                                    raw_input: tool_use.input_json.clone(),
 762                                    input,
 763                                },
 764                            ))];
 765                        }
 766                    }
 767                    vec![]
 768                }
 769            },
 770            Event::ContentBlockStop { index } => {
 771                if let Some(tool_use) = self.tool_uses_by_index.remove(&index) {
 772                    let input_json = tool_use.input_json.trim();
 773                    let input_value = if input_json.is_empty() {
 774                        Ok(serde_json::Value::Object(serde_json::Map::default()))
 775                    } else {
 776                        serde_json::Value::from_str(input_json)
 777                    };
 778                    let event_result = match input_value {
 779                        Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
 780                            LanguageModelToolUse {
 781                                id: tool_use.id.into(),
 782                                name: tool_use.name.into(),
 783                                is_input_complete: true,
 784                                input,
 785                                raw_input: tool_use.input_json.clone(),
 786                            },
 787                        )),
 788                        Err(json_parse_err) => {
 789                            Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
 790                                id: tool_use.id.into(),
 791                                tool_name: tool_use.name.into(),
 792                                raw_input: input_json.into(),
 793                                json_parse_error: json_parse_err.to_string(),
 794                            })
 795                        }
 796                    };
 797
 798                    vec![event_result]
 799                } else {
 800                    Vec::new()
 801                }
 802            }
 803            Event::MessageStart { message } => {
 804                update_usage(&mut self.usage, &message.usage);
 805                vec![
 806                    Ok(LanguageModelCompletionEvent::UsageUpdate(convert_usage(
 807                        &self.usage,
 808                    ))),
 809                    Ok(LanguageModelCompletionEvent::StartMessage {
 810                        message_id: message.id,
 811                    }),
 812                ]
 813            }
 814            Event::MessageDelta { delta, usage } => {
 815                update_usage(&mut self.usage, &usage);
 816                if let Some(stop_reason) = delta.stop_reason.as_deref() {
 817                    self.stop_reason = match stop_reason {
 818                        "end_turn" => StopReason::EndTurn,
 819                        "max_tokens" => StopReason::MaxTokens,
 820                        "tool_use" => StopReason::ToolUse,
 821                        "refusal" => StopReason::Refusal,
 822                        _ => {
 823                            log::error!("Unexpected anthropic stop_reason: {stop_reason}");
 824                            StopReason::EndTurn
 825                        }
 826                    };
 827                }
 828                vec![Ok(LanguageModelCompletionEvent::UsageUpdate(
 829                    convert_usage(&self.usage),
 830                ))]
 831            }
 832            Event::MessageStop => {
 833                vec![Ok(LanguageModelCompletionEvent::Stop(self.stop_reason))]
 834            }
 835            Event::Error { error } => {
 836                vec![Err(error.into())]
 837            }
 838            _ => Vec::new(),
 839        }
 840    }
 841}
 842
/// Accumulates a streamed `tool_use` content block while its JSON input is
/// still arriving; removed and finalized when the content block stops.
struct RawToolUse {
    // Tool-use id reported by the API; forwarded on each emitted event.
    id: String,
    // Name of the tool the model is invoking.
    name: String,
    // Concatenation of the streamed `input_json` fragments so far; may be
    // incomplete (invalid) JSON until the content block is complete.
    input_json: String,
}
 848
 849/// Updates usage data by preferring counts from `new`.
 850fn update_usage(usage: &mut Usage, new: &Usage) {
 851    if let Some(input_tokens) = new.input_tokens {
 852        usage.input_tokens = Some(input_tokens);
 853    }
 854    if let Some(output_tokens) = new.output_tokens {
 855        usage.output_tokens = Some(output_tokens);
 856    }
 857    if let Some(cache_creation_input_tokens) = new.cache_creation_input_tokens {
 858        usage.cache_creation_input_tokens = Some(cache_creation_input_tokens);
 859    }
 860    if let Some(cache_read_input_tokens) = new.cache_read_input_tokens {
 861        usage.cache_read_input_tokens = Some(cache_read_input_tokens);
 862    }
 863}
 864
 865fn convert_usage(usage: &Usage) -> language_model::TokenUsage {
 866    language_model::TokenUsage {
 867        input_tokens: usage.input_tokens.unwrap_or(0),
 868        output_tokens: usage.output_tokens.unwrap_or(0),
 869        cache_creation_input_tokens: usage.cache_creation_input_tokens.unwrap_or(0),
 870        cache_read_input_tokens: usage.cache_read_input_tokens.unwrap_or(0),
 871    }
 872}
 873
/// Settings UI for entering, saving, and resetting the Anthropic API key.
struct ConfigurationView {
    // Single-line editor the user types the API key into.
    api_key_editor: Entity<Editor>,
    // Shared provider state holding the API key and authentication status.
    state: gpui::Entity<State>,
    // In-flight credential-loading task; `Some` while loading, cleared to
    // `None` when it finishes (used to show a "Loading..." placeholder).
    load_credentials_task: Option<Task<()>>,
    // Which agent this view configures; only affects the instruction copy.
    target_agent: ConfigurationViewTargetAgent,
}
 880
impl ConfigurationView {
    // Shown as editor placeholder text to hint at the expected key format.
    const PLACEHOLDER_TEXT: &'static str = "sk-ant-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";

    /// Builds the view, kicks off an authentication attempt against the
    /// shared `state`, and re-renders whenever `state` changes.
    fn new(
        state: gpui::Entity<State>,
        target_agent: ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Self {
        // Re-render this view whenever the shared provider state changes.
        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        // Try to authenticate in the background; the view renders a loading
        // placeholder until this task clears `load_credentials_task`.
        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor: cx.new(|cx| {
                let mut editor = Editor::single_line(window, cx);
                editor.set_placeholder_text(Self::PLACEHOLDER_TEXT, window, cx);
                editor
            }),
            state,
            load_credentials_task,
            target_agent,
        }
    }

    /// Saves the key currently typed into the editor (triggered by the
    /// `menu::Confirm` action); no-op when the editor is empty.
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        // url changes can cause the editor to be displayed again
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", window, cx));

        // Persist the key via the shared state; errors are logged, not surfaced.
        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    /// Clears the stored API key and empties the editor.
    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(None, cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    /// Renders the API-key editor styled to match the current UI theme font.
    fn render_api_key_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            white_space: WhiteSpace::Normal,
            ..Default::default()
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    /// The key-entry editor is shown only while not authenticated.
    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}
 986
impl Render for ConfigurationView {
    /// Renders one of three states: a loading placeholder while credentials
    /// are being checked, the key-entry form when unauthenticated, or a
    /// "configured" summary row with a reset button once a key is set.
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();

        if self.load_credentials_task.is_some() {
            // Still authenticating in the background (see `ConfigurationView::new`).
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            // Unauthenticated: show instructions plus the key-entry editor.
            // `menu::Confirm` (enter) in this container saves the key.
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new(format!("To use {}, you need to add an API key. Follow these steps:", match &self.target_agent {
                    ConfigurationViewTargetAgent::ZedAgent => "Zed's agent with Anthropic".into(),
                    ConfigurationViewTargetAgent::Other(agent) => agent.clone(),
                })))
                .child(
                    List::new()
                        .child(
                            InstructionListItem::new(
                                "Create one by visiting",
                                Some("Anthropic's settings"),
                                Some("https://console.anthropic.com/settings/keys")
                            )
                        )
                        .child(
                            InstructionListItem::text_only("Paste your API key below and hit enter to start using the agent")
                        )
                )
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .border_1()
                        .border_color(cx.theme().colors().border)
                        .rounded_sm()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any()
        } else {
            // Authenticated: show where the key came from and a reset button.
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable")
                        } else {
                            // Mention a custom API URL so it's clear which
                            // endpoint the stored key applies to.
                            let api_url = AnthropicLanguageModelProvider::api_url(cx);
                            if api_url == ANTHROPIC_API_URL {
                                "API key configured".to_string()
                            } else {
                                format!("API key configured for {}", truncate_and_trailoff(&api_url, 32))
                            }
                        })),
                )
                .child(
                    // A key supplied via the environment variable cannot be
                    // reset from the UI, so the button is disabled with a hint.
                    Button::new("reset-key", "Reset Key")
                        .label_size(LabelSize::Small)
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        }
    }
}
1074
#[cfg(test)]
mod tests {
    use super::*;
    use anthropic::AnthropicModelMode;
    use language_model::{LanguageModelRequestMessage, MessageContent};

    /// When a message has `cache: true`, only the final content segment may
    /// carry a cache-control breakpoint; every earlier segment must have
    /// `cache_control: None`.
    #[test]
    fn test_cache_control_only_on_last_segment() {
        let request = LanguageModelRequest {
            messages: vec![LanguageModelRequestMessage {
                role: Role::User,
                content: vec![
                    MessageContent::Text("Some prompt".to_string()),
                    MessageContent::Image(language_model::LanguageModelImage::empty()),
                    MessageContent::Image(language_model::LanguageModelImage::empty()),
                    MessageContent::Image(language_model::LanguageModelImage::empty()),
                    MessageContent::Image(language_model::LanguageModelImage::empty()),
                ],
                cache: true,
            }],
            thread_id: None,
            prompt_id: None,
            intent: None,
            mode: None,
            stop: vec![],
            temperature: None,
            tools: vec![],
            tool_choice: None,
            thinking_allowed: true,
        };

        let anthropic_request = into_anthropic(
            request,
            "claude-3-5-sonnet".to_string(),
            0.7,
            4096,
            AnthropicModelMode::Default,
        );

        assert_eq!(anthropic_request.messages.len(), 1);

        let message = &anthropic_request.messages[0];
        assert_eq!(message.content.len(), 5);

        // Leading text segment: no cache control.
        assert!(matches!(
            message.content[0],
            anthropic::RequestContent::Text {
                cache_control: None,
                ..
            }
        ));

        // All intermediate image segments (indices 1..=3) must be uncached.
        // Fixed: this loop previously stopped at index 2, so the last
        // non-final segment (index 3) was never checked.
        for i in 1..4 {
            assert!(matches!(
                message.content[i],
                anthropic::RequestContent::Image {
                    cache_control: None,
                    ..
                }
            ));
        }

        // Only the final segment carries the ephemeral cache breakpoint.
        assert!(matches!(
            message.content[4],
            anthropic::RequestContent::Image {
                cache_control: Some(anthropic::CacheControl {
                    cache_type: anthropic::CacheControlType::Ephemeral,
                }),
                ..
            }
        ));
    }
}