cloud.rs

use super::open_ai::count_open_ai_tokens;
use anthropic::AnthropicError;
use anyhow::{anyhow, Result};
use client::{
    zed_urls, Client, PerformCompletionParams, UserStore, EXPIRED_LLM_TOKEN_HEADER_NAME,
    MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME,
};
use collections::BTreeMap;
use feature_flags::{FeatureFlagAppExt, LlmClosedBeta, ZedPro};
use futures::{
    future::BoxFuture, stream::BoxStream, AsyncBufReadExt, FutureExt, Stream, StreamExt,
    TryStreamExt as _,
};
use gpui::{
    AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, Global, ReadGlobal,
    Subscription, Task,
};
use http_client::{AsyncBody, HttpClient, Method, Response, StatusCode};
use language_model::{
    CloudModel, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId, LanguageModelName,
    LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
    LanguageModelProviderTosView, LanguageModelRequest, RateLimiter, ZED_CLOUD_PROVIDER_ID,
};
use language_model::{
    LanguageModelAvailability, LanguageModelCompletionEvent, LanguageModelProvider,
};
use proto::TypedEnvelope;
use schemars::JsonSchema;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_json::value::RawValue;
use settings::{Settings, SettingsStore};
use smol::{
    io::{AsyncReadExt, BufReader},
    lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard},
};
use std::fmt;
use std::{
    future,
    sync::{Arc, LazyLock},
};
use strum::IntoEnumIterator;
use thiserror::Error;
use ui::{prelude::*, TintColor};

use crate::provider::anthropic::map_to_language_model_completion_events;
use crate::AllLanguageModelSettings;

use super::anthropic::count_anthropic_tokens;

pub const PROVIDER_NAME: &str = "Zed";

const ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: Option<&str> =
    option_env!("ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON");

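/// Extra models that can be baked in at compile time via the
/// `ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON` environment variable. The JSON is
/// parsed once, on first access, and is expected to be an array of `AvailableModel`
/// objects; an illustrative (not authoritative) value might look like:
///
/// ```json
/// [{"provider": "anthropic", "name": "claude-3-5-sonnet-20240620", "max_tokens": 200000}]
/// ```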
fn zed_cloud_provider_additional_models() -> &'static [AvailableModel] {
    static ADDITIONAL_MODELS: LazyLock<Vec<AvailableModel>> = LazyLock::new(|| {
        ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON
            .map(|json| serde_json::from_str(json).unwrap())
            .unwrap_or_default()
    });
    ADDITIONAL_MODELS.as_slice()
}

#[derive(Default, Clone, Debug, PartialEq)]
pub struct ZedDotDevSettings {
    pub available_models: Vec<AvailableModel>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum AvailableProvider {
    Anthropic,
    OpenAi,
    Google,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// The provider of the language model.
    pub provider: AvailableProvider,
    /// The model's name in the provider's API, e.g. `claude-3-5-sonnet-20240620`.
    pub name: String,
    /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
    pub display_name: Option<String>,
    /// The size of the context window, indicating the maximum number of tokens the model can process.
    pub max_tokens: usize,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u32>,
    /// The maximum number of completion tokens allowed by the model (o1-* only).
    pub max_completion_tokens: Option<u32>,
    /// Override this model with a different Anthropic model for tool calls.
    pub tool_override: Option<String>,
    /// Indicates whether this custom model supports caching.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// The default temperature to use for this model.
    pub default_temperature: Option<f32>,
    /// Any extra beta headers to provide when using the model.
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
}

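/// Global wrapper that makes the single `RefreshLlmTokenListener` entity
/// discoverable through the `App` context.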
struct GlobalRefreshLlmTokenListener(Entity<RefreshLlmTokenListener>);

impl Global for GlobalRefreshLlmTokenListener {}

pub struct RefreshLlmTokenEvent;

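/// Listens for `proto::RefreshLlmToken` messages from the server and re-broadcasts
/// them as `RefreshLlmTokenEvent`s, prompting subscribers (such as `State`) to
/// fetch a fresh LLM API token.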
pub struct RefreshLlmTokenListener {
    _llm_token_subscription: client::Subscription,
}

impl EventEmitter<RefreshLlmTokenEvent> for RefreshLlmTokenListener {}

impl RefreshLlmTokenListener {
    pub fn register(client: Arc<Client>, cx: &mut App) {
        let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, cx));
        cx.set_global(GlobalRefreshLlmTokenListener(listener));
    }

    pub fn global(cx: &App) -> Entity<Self> {
        GlobalRefreshLlmTokenListener::global(cx).0.clone()
    }

    fn new(client: Arc<Client>, cx: &mut Context<Self>) -> Self {
        Self {
            _llm_token_subscription: client
                .add_message_handler(cx.weak_entity(), Self::handle_refresh_llm_token),
        }
    }

    async fn handle_refresh_llm_token(
        this: Entity<Self>,
        _: TypedEnvelope<proto::RefreshLlmToken>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |_this, cx| cx.emit(RefreshLlmTokenEvent))
    }
}

pub struct CloudLanguageModelProvider {
    client: Arc<Client>,
    state: gpui::Entity<State>,
    _maintain_client_status: Task<()>,
}

pub struct State {
    client: Arc<Client>,
    llm_api_token: LlmApiToken,
    user_store: Entity<UserStore>,
    status: client::Status,
    accept_terms: Option<Task<Result<()>>>,
    _settings_subscription: Subscription,
    _llm_token_subscription: Subscription,
}

impl State {
    fn new(
        client: Arc<Client>,
        user_store: Entity<UserStore>,
        status: client::Status,
        cx: &mut Context<Self>,
    ) -> Self {
        let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx);

        Self {
            client: client.clone(),
            llm_api_token: LlmApiToken::default(),
            user_store,
            status,
            accept_terms: None,
            _settings_subscription: cx.observe_global::<SettingsStore>(|_, cx| {
                cx.notify();
            }),
            _llm_token_subscription: cx.subscribe(
                &refresh_llm_token_listener,
                |this, _listener, _event, cx| {
                    let client = this.client.clone();
                    let llm_api_token = this.llm_api_token.clone();
                    cx.spawn(|_this, _cx| async move {
                        llm_api_token.refresh(&client).await?;
                        anyhow::Ok(())
                    })
                    .detach_and_log_err(cx);
                },
            ),
        }
    }

    fn is_signed_out(&self) -> bool {
        self.status.is_signed_out()
    }

    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let client = self.client.clone();
        cx.spawn(move |this, mut cx| async move {
            client.authenticate_and_connect(true, &cx).await?;
            this.update(&mut cx, |_, cx| cx.notify())
        })
    }

    fn has_accepted_terms_of_service(&self, cx: &App) -> bool {
        self.user_store
            .read(cx)
            .current_user_has_accepted_terms()
            .unwrap_or(false)
    }

    fn accept_terms_of_service(&mut self, cx: &mut Context<Self>) {
        let user_store = self.user_store.clone();
        self.accept_terms = Some(cx.spawn(move |this, mut cx| async move {
            let _ = user_store
                .update(&mut cx, |store, cx| store.accept_terms_of_service(cx))?
                .await;
            this.update(&mut cx, |this, cx| {
                this.accept_terms = None;
                cx.notify()
            })
        }));
    }
}

impl CloudLanguageModelProvider {
    pub fn new(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) -> Self {
        let mut status_rx = client.status();
        let status = *status_rx.borrow();

        let state = cx.new(|cx| State::new(client.clone(), user_store.clone(), status, cx));

        let state_ref = state.downgrade();
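        // Keep `State.status` in sync with the client's connection status for as
        // long as the provider (and therefore this task) is alive.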
        let maintain_client_status = cx.spawn(|mut cx| async move {
            while let Some(status) = status_rx.next().await {
                if let Some(this) = state_ref.upgrade() {
                    _ = this.update(&mut cx, |this, cx| {
                        if this.status != status {
                            this.status = status;
                            cx.notify();
                        }
                    });
                } else {
                    break;
                }
            }
        });

        Self {
            client,
            state: state.clone(),
            _maintain_client_status: maintain_client_status,
        }
    }
}

impl LanguageModelProviderState for CloudLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for CloudLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(ZED_CLOUD_PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiZed
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

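        // Staff get every known first-party model; everyone else currently gets
        // Claude 3.5 Sonnet. Models from settings (and, behind the closed-beta
        // flag, compile-time extras) are merged in below.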
        if cx.is_staff() {
            for model in anthropic::Model::iter() {
                if !matches!(model, anthropic::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::Anthropic(model));
                }
            }
            for model in open_ai::Model::iter() {
                if !matches!(model, open_ai::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::OpenAi(model));
                }
            }
            for model in google_ai::Model::iter() {
                if !matches!(model, google_ai::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::Google(model));
                }
            }
        } else {
            models.insert(
                anthropic::Model::Claude3_5Sonnet.id().to_string(),
                CloudModel::Anthropic(anthropic::Model::Claude3_5Sonnet),
            );
        }

        let llm_closed_beta_models = if cx.has_flag::<LlmClosedBeta>() {
            zed_cloud_provider_additional_models()
        } else {
            &[]
        };

        // Merge in models from settings (and closed-beta extras), overriding any
        // built-in model with the same id.
        for model in AllLanguageModelSettings::get_global(cx)
            .zed_dot_dev
            .available_models
            .iter()
            .chain(llm_closed_beta_models)
            .cloned()
        {
            let model = match model.provider {
                AvailableProvider::Anthropic => CloudModel::Anthropic(anthropic::Model::Custom {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                    tool_override: model.tool_override.clone(),
                    cache_configuration: model.cache_configuration.as_ref().map(|config| {
                        anthropic::AnthropicModelCacheConfiguration {
                            max_cache_anchors: config.max_cache_anchors,
                            should_speculate: config.should_speculate,
                            min_total_token: config.min_total_token,
                        }
                    }),
                    default_temperature: model.default_temperature,
                    max_output_tokens: model.max_output_tokens,
                    extra_beta_headers: model.extra_beta_headers.clone(),
                }),
                AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                    max_output_tokens: model.max_output_tokens,
                    max_completion_tokens: model.max_completion_tokens,
                }),
                AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                }),
            };
            models.insert(model.id().to_string(), model.clone());
        }

        let llm_api_token = self.state.read(cx).llm_api_token.clone();
        models
            .into_values()
            .map(|model| {
                Arc::new(CloudLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    llm_api_token: llm_api_token.clone(),
                    client: self.client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        !self.state.read(cx).is_signed_out()
    }

    fn authenticate(&self, _cx: &mut App) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }

    fn configuration_view(&self, _: &mut Window, cx: &mut App) -> AnyView {
        cx.new(|_| ConfigurationView {
            state: self.state.clone(),
        })
        .into()
    }

    fn must_accept_terms(&self, cx: &App) -> bool {
        !self.state.read(cx).has_accepted_terms_of_service(cx)
    }

    fn render_accept_terms(
        &self,
        view: LanguageModelProviderTosView,
        cx: &mut App,
    ) -> Option<AnyElement> {
        render_accept_terms(self.state.clone(), view, cx)
    }

    fn reset_credentials(&self, _cx: &mut App) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }
}

fn render_accept_terms(
    state: Entity<State>,
    view_kind: LanguageModelProviderTosView,
    cx: &mut App,
) -> Option<AnyElement> {
    if state.read(cx).has_accepted_terms_of_service(cx) {
        return None;
    }

    let accept_terms_disabled = state.read(cx).accept_terms.is_some();

    let terms_button = Button::new("terms_of_service", "Terms of Service")
        .style(ButtonStyle::Subtle)
        .icon(IconName::ArrowUpRight)
        .icon_color(Color::Muted)
        .icon_size(IconSize::XSmall)
        .on_click(move |_, _window, cx| cx.open_url("https://zed.dev/terms-of-service"));

    let text = "To start using Zed AI, please read and accept the";

    let form = v_flex()
        .w_full()
        .gap_2()
        .when(
            view_kind == LanguageModelProviderTosView::ThreadEmptyState,
            |form| form.items_center(),
        )
        .child(
            h_flex()
                .flex_wrap()
                .when(
                    view_kind == LanguageModelProviderTosView::ThreadEmptyState,
                    |form| form.justify_center(),
                )
                .child(Label::new(text))
                .child(terms_button),
        )
        .child({
            let button_container = h_flex().w_full().child(
                Button::new("accept_terms", "I accept the Terms of Service")
                    .style(ButtonStyle::Tinted(TintColor::Accent))
                    .disabled(accept_terms_disabled)
                    .on_click({
                        let state = state.downgrade();
                        move |_, _window, cx| {
                            state
                                .update(cx, |state, cx| state.accept_terms_of_service(cx))
                                .ok();
                        }
                    }),
            );

            match view_kind {
                LanguageModelProviderTosView::ThreadEmptyState => button_container.justify_center(),
                LanguageModelProviderTosView::PromptEditorPopup => button_container.justify_end(),
                LanguageModelProviderTosView::Configuration => button_container.justify_start(),
            }
        });

    Some(form.into_any())
}

pub struct CloudLanguageModel {
    id: LanguageModelId,
    model: CloudModel,
    llm_api_token: LlmApiToken,
    client: Arc<Client>,
    request_limiter: RateLimiter,
}

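/// A lazily fetched, shared cache of the LLM API token. `acquire` returns the
/// cached token when one exists; `refresh` always requests a new one.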
#[derive(Clone, Default)]
pub struct LlmApiToken(Arc<RwLock<Option<String>>>);

#[derive(Error, Debug)]
pub struct PaymentRequiredError;

impl fmt::Display for PaymentRequiredError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "Payment required to use this language model. Please upgrade your account."
        )
    }
}

#[derive(Error, Debug)]
pub struct MaxMonthlySpendReachedError;

impl fmt::Display for MaxMonthlySpendReachedError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "Maximum spending limit reached for this month. For more usage, increase your spending limit."
        )
    }
}

impl CloudLanguageModel {
    async fn perform_llm_completion(
        client: Arc<Client>,
        llm_api_token: LlmApiToken,
        body: PerformCompletionParams,
    ) -> Result<Response<AsyncBody>> {
        let http_client = &client.http_client();

        let mut token = llm_api_token.acquire(&client).await?;
        let mut did_retry = false;

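        // Issue the request, retrying exactly once with a freshly fetched token if
        // the server signals that the cached LLM token has expired.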
        let response = loop {
            let request_builder = http_client::Request::builder();
            let request = request_builder
                .method(Method::POST)
                .uri(http_client.build_zed_llm_url("/completion", &[])?.as_ref())
                .header("Content-Type", "application/json")
                .header("Authorization", format!("Bearer {token}"))
                .body(serde_json::to_string(&body)?.into())?;
            let mut response = http_client.send(request).await?;
            if response.status().is_success() {
                break response;
            } else if !did_retry
                && response
                    .headers()
                    .get(EXPIRED_LLM_TOKEN_HEADER_NAME)
                    .is_some()
            {
                did_retry = true;
                token = llm_api_token.refresh(&client).await?;
            } else if response.status() == StatusCode::FORBIDDEN
                && response
                    .headers()
                    .get(MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME)
                    .is_some()
            {
                break Err(anyhow!(MaxMonthlySpendReachedError))?;
            } else if response.status() == StatusCode::PAYMENT_REQUIRED {
                break Err(anyhow!(PaymentRequiredError))?;
            } else {
                let mut body = String::new();
                response.body_mut().read_to_string(&mut body).await?;
                break Err(anyhow!(
                    "cloud language model completion failed with status {}: {body}",
                    response.status()
                ))?;
            }
        };

        Ok(response)
    }
}

impl LanguageModel for CloudLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn icon(&self) -> Option<IconName> {
        self.model.icon()
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(ZED_CLOUD_PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("zed.dev/{}", self.model.id())
    }

    fn availability(&self) -> LanguageModelAvailability {
        self.model.availability()
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
        match &self.model {
            CloudModel::Anthropic(model) => {
                model
                    .cache_configuration()
                    .map(|cache| LanguageModelCacheConfiguration {
                        max_cache_anchors: cache.max_cache_anchors,
                        should_speculate: cache.should_speculate,
                        min_total_token: cache.min_total_token,
                    })
            }
            CloudModel::OpenAi(_) | CloudModel::Google(_) => None,
        }
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<usize>> {
        match self.model.clone() {
            CloudModel::Anthropic(_) => count_anthropic_tokens(request, cx),
            CloudModel::OpenAi(model) => count_open_ai_tokens(request, model, cx),
            CloudModel::Google(model) => {
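                // There is no local tokenizer for Google models here, so token
                // counting is delegated to the server via a
                // `CountLanguageModelTokens` RPC.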
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let request = google_ai::CountTokensRequest {
                    contents: request.contents,
                };
                async move {
                    let request = serde_json::to_string(&request)?;
                    let response = client
                        .request(proto::CountLanguageModelTokens {
                            provider: proto::LanguageModelProvider::Google as i32,
                            request,
                        })
                        .await?;
                    Ok(response.token_count as usize)
                }
                .boxed()
            }
        }
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        _cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<LanguageModelCompletionEvent>>>> {
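        // Every provider is reached through Zed's LLM service: the provider-native
        // request is serialized into `PerformCompletionParams`, and the response is
        // decoded as a stream of newline-delimited JSON events.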
        match &self.model {
            CloudModel::Anthropic(model) => {
                let request = request.into_anthropic(
                    model.id().into(),
                    model.default_temperature(),
                    model.max_output_tokens(),
                );
                let client = self.client.clone();
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Anthropic,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(map_to_language_model_completion_events(Box::pin(
                        response_lines(response).map_err(AnthropicError::Other),
                    )))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::OpenAi(model) => {
                let client = self.client.clone();
                let request = request.into_open_ai(model.id().into(), model.max_output_tokens());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::OpenAi,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move {
                    Ok(future
                        .await?
                        .map(|result| result.map(LanguageModelCompletionEvent::Text))
                        .boxed())
                }
                .boxed()
            }
            CloudModel::Google(model) => {
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Google,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(google_ai::extract_text_from_events(response_lines(
                        response,
                    )))
                });
                async move {
                    Ok(future
                        .await?
                        .map(|result| result.map(LanguageModelCompletionEvent::Text))
                        .boxed())
                }
                .boxed()
            }
        }
    }

    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        input_schema: serde_json::Value,
        _cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        let client = self.client.clone();
        let llm_api_token = self.llm_api_token.clone();

        match &self.model {
            CloudModel::Anthropic(model) => {
                let mut request = request.into_anthropic(
                    model.tool_model_id().into(),
                    model.default_temperature(),
                    model.max_output_tokens(),
                );
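                // Force the model to call the requested tool by registering it as
                // the only available tool and making it the required `tool_choice`.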
                request.tool_choice = Some(anthropic::ToolChoice::Tool {
                    name: tool_name.clone(),
                });
                request.tools = vec![anthropic::Tool {
                    name: tool_name.clone(),
                    description: tool_description,
                    input_schema,
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Anthropic,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(anthropic::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::OpenAi(model) => {
                let mut request =
                    request.into_open_ai(model.id().into(), model.max_output_tokens());
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::OpenAi,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::Google(_) => {
                future::ready(Err(anyhow!("tool use not implemented for Google AI"))).boxed()
            }
        }
    }
}

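/// Interprets a streaming response body as newline-delimited JSON, yielding one
/// deserialized `T` per line.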
fn response_lines<T: DeserializeOwned>(
    response: Response<AsyncBody>,
) -> impl Stream<Item = Result<T>> {
    futures::stream::try_unfold(
        (String::new(), BufReader::new(response.into_body())),
        move |(mut line, mut body)| async {
            match body.read_line(&mut line).await {
                Ok(0) => Ok(None),
                Ok(_) => {
                    let event: T = serde_json::from_str(&line)?;
                    line.clear();
                    Ok(Some((event, (line, body))))
                }
                Err(e) => Err(e.into()),
            }
        },
    )
}

impl LlmApiToken {
    pub async fn acquire(&self, client: &Arc<Client>) -> Result<String> {
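        // An upgradable read lock lets concurrent callers share the fast path while
        // ensuring at most one of them upgrades to write and fetches a token.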
        let lock = self.0.upgradable_read().await;
        if let Some(token) = lock.as_ref() {
            Ok(token.to_string())
        } else {
            Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, client).await
        }
    }

    pub async fn refresh(&self, client: &Arc<Client>) -> Result<String> {
        Self::fetch(self.0.write().await, client).await
    }

    async fn fetch<'a>(
        mut lock: RwLockWriteGuard<'a, Option<String>>,
        client: &Arc<Client>,
    ) -> Result<String> {
        let response = client.request(proto::GetLlmToken {}).await?;
        *lock = Some(response.token.clone());
        Ok(response.token.clone())
    }
}

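/// The provider's configuration UI: a sign-in prompt when signed out; otherwise
/// terms-of-service acceptance plus plan and subscription information.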
struct ConfigurationView {
    state: gpui::Entity<State>,
}

impl ConfigurationView {
    fn authenticate(&mut self, cx: &mut Context<Self>) {
        self.state.update(cx, |state, cx| {
            state.authenticate(cx).detach_and_log_err(cx);
        });
        cx.notify();
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        const ZED_AI_URL: &str = "https://zed.dev/ai";

        let is_connected = !self.state.read(cx).is_signed_out();
        let plan = self.state.read(cx).user_store.read(cx).current_plan();
        let has_accepted_terms = self.state.read(cx).has_accepted_terms_of_service(cx);

        let is_pro = plan == Some(proto::Plan::ZedPro);
        let subscription_text = Label::new(if is_pro {
            "You have full access to Zed's hosted LLMs, which include models from Anthropic, OpenAI, and Google. They come with faster speeds and higher limits through Zed Pro."
        } else {
            "You have basic access to models from Anthropic through the Zed AI Free plan."
        });
        let manage_subscription_button = if is_pro {
            Some(
                h_flex().child(
                    Button::new("manage_settings", "Manage Subscription")
                        .style(ButtonStyle::Tinted(TintColor::Accent))
                        .on_click(
                            cx.listener(|_, _, _, cx| cx.open_url(&zed_urls::account_url(cx))),
                        ),
                ),
            )
        } else if cx.has_flag::<ZedPro>() {
            Some(
                h_flex()
                    .gap_2()
                    .child(
                        Button::new("learn_more", "Learn more")
                            .style(ButtonStyle::Subtle)
                            .on_click(cx.listener(|_, _, _, cx| cx.open_url(ZED_AI_URL))),
                    )
                    .child(
                        Button::new("upgrade", "Upgrade")
                            .style(ButtonStyle::Subtle)
                            .color(Color::Accent)
                            .on_click(
                                cx.listener(|_, _, _, cx| cx.open_url(&zed_urls::account_url(cx))),
                            ),
                    ),
            )
        } else {
            None
        };

        if is_connected {
            v_flex()
                .gap_3()
                .w_full()
                .children(render_accept_terms(
                    self.state.clone(),
                    LanguageModelProviderTosView::Configuration,
                    cx,
                ))
                .when(has_accepted_terms, |this| {
                    this.child(subscription_text)
                        .children(manage_subscription_button)
                })
        } else {
            v_flex()
                .gap_2()
                .child(Label::new("Use Zed AI to access hosted language models."))
                .child(
                    Button::new("sign_in", "Sign In")
                        .icon_color(Color::Muted)
                        .icon(IconName::Github)
                        .icon_position(IconPosition::Start)
                        .on_click(cx.listener(move |this, _, _, cx| this.authenticate(cx))),
                )
        }
    }
}