cloud.rs

  1use super::open_ai::count_open_ai_tokens;
  2use crate::{
  3    settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelId,
  4    LanguageModelName, LanguageModelProviderId, LanguageModelProviderName,
  5    LanguageModelProviderState, LanguageModelRequest, RateLimiter, ZedModel,
  6};
  7use anthropic::AnthropicError;
  8use anyhow::{anyhow, Result};
  9use client::{Client, PerformCompletionParams, UserStore, EXPIRED_LLM_TOKEN_HEADER_NAME};
 10use collections::BTreeMap;
 11use feature_flags::{FeatureFlagAppExt, ZedPro};
 12use futures::{
 13    future::BoxFuture, stream::BoxStream, AsyncBufReadExt, FutureExt, Stream, StreamExt,
 14    TryStreamExt as _,
 15};
 16use gpui::{
 17    AnyElement, AnyView, AppContext, AsyncAppContext, FontWeight, Model, ModelContext,
 18    Subscription, Task,
 19};
 20use http_client::{AsyncBody, HttpClient, Method, Response};
 21use schemars::JsonSchema;
 22use serde::{de::DeserializeOwned, Deserialize, Serialize};
 23use serde_json::value::RawValue;
 24use settings::{Settings, SettingsStore};
 25use smol::{
 26    io::{AsyncReadExt, BufReader},
 27    lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard},
 28};
 29use std::{future, sync::Arc};
 30use strum::IntoEnumIterator;
 31use ui::prelude::*;
 32
 33use crate::{LanguageModelAvailability, LanguageModelProvider};
 34
 35use super::anthropic::count_anthropic_tokens;
 36
/// Stable identifier for this provider, used as its registry key.
pub const PROVIDER_ID: &str = "zed.dev";
/// Human-readable provider name shown in the UI.
pub const PROVIDER_NAME: &str = "Zed";
 39
/// User-configurable settings for the zed.dev language model provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct ZedDotDevSettings {
    // Custom models (beyond the built-in ones) offered to staff users;
    // see `CloudLanguageModelProvider::provided_models`.
    pub available_models: Vec<AvailableModel>,
}
 44
/// Upstream model family that serves a custom model configured in settings.
/// Serialized in lowercase (e.g. "anthropic") in the settings file.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum AvailableProvider {
    Anthropic,
    OpenAi,
    Google,
}
 52
/// A custom model entry from the `available_models` setting.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    // Which upstream family the model belongs to.
    provider: AvailableProvider,
    // Model name as understood by the upstream provider.
    name: String,
    // Context-window size used for token budgeting.
    max_tokens: usize,
    // Optional alternate model id to use for tool calls
    // (only consumed by the Anthropic variant below).
    tool_override: Option<String>,
}
 60
/// Language model provider backed by Zed's hosted LLM service (zed.dev).
pub struct CloudLanguageModelProvider {
    client: Arc<Client>,
    // Cached bearer token for the LLM HTTP API, shared with all models.
    llm_api_token: LlmApiToken,
    state: gpui::Model<State>,
    // Background task mirroring client connection status into `state`;
    // held only so it stays alive for the provider's lifetime.
    _maintain_client_status: Task<()>,
}
 67
/// Observable provider state: sign-in status and terms-of-service acceptance.
pub struct State {
    client: Arc<Client>,
    user_store: Model<UserStore>,
    // Latest connection status received from the client.
    status: client::Status,
    // In-flight terms-of-service acceptance request, if any; presence is
    // used by the UI to disable the accept button.
    accept_terms: Option<Task<Result<()>>>,
    // Keeps the global-settings observer alive.
    _subscription: Subscription,
}
 75
 76impl State {
 77    fn is_signed_out(&self) -> bool {
 78        self.status.is_signed_out()
 79    }
 80
 81    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
 82        let client = self.client.clone();
 83        cx.spawn(move |this, mut cx| async move {
 84            client.authenticate_and_connect(true, &cx).await?;
 85            this.update(&mut cx, |_, cx| cx.notify())
 86        })
 87    }
 88
 89    fn has_accepted_terms_of_service(&self, cx: &AppContext) -> bool {
 90        self.user_store
 91            .read(cx)
 92            .current_user_has_accepted_terms()
 93            .unwrap_or(false)
 94    }
 95
 96    fn accept_terms_of_service(&mut self, cx: &mut ModelContext<Self>) {
 97        let user_store = self.user_store.clone();
 98        self.accept_terms = Some(cx.spawn(move |this, mut cx| async move {
 99            let _ = user_store
100                .update(&mut cx, |store, cx| store.accept_terms_of_service(cx))?
101                .await;
102            this.update(&mut cx, |this, cx| {
103                this.accept_terms = None;
104                cx.notify()
105            })
106        }));
107    }
108}
109
110impl CloudLanguageModelProvider {
111    pub fn new(user_store: Model<UserStore>, client: Arc<Client>, cx: &mut AppContext) -> Self {
112        let mut status_rx = client.status();
113        let status = *status_rx.borrow();
114
115        let state = cx.new_model(|cx| State {
116            client: client.clone(),
117            user_store,
118            status,
119            accept_terms: None,
120            _subscription: cx.observe_global::<SettingsStore>(|_, cx| {
121                cx.notify();
122            }),
123        });
124
125        let state_ref = state.downgrade();
126        let maintain_client_status = cx.spawn(|mut cx| async move {
127            while let Some(status) = status_rx.next().await {
128                if let Some(this) = state_ref.upgrade() {
129                    _ = this.update(&mut cx, |this, cx| {
130                        if this.status != status {
131                            this.status = status;
132                            cx.notify();
133                        }
134                    });
135                } else {
136                    break;
137                }
138            }
139        });
140
141        Self {
142            client,
143            state,
144            llm_api_token: LlmApiToken::default(),
145            _maintain_client_status: maintain_client_status,
146        }
147    }
148}
149
impl LanguageModelProviderState for CloudLanguageModelProvider {
    type ObservableEntity = State;

    /// Exposes the provider's `State` model so the UI layer can observe it.
    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
157
158impl LanguageModelProvider for CloudLanguageModelProvider {
159    fn id(&self) -> LanguageModelProviderId {
160        LanguageModelProviderId(PROVIDER_ID.into())
161    }
162
163    fn name(&self) -> LanguageModelProviderName {
164        LanguageModelProviderName(PROVIDER_NAME.into())
165    }
166
167    fn icon(&self) -> IconName {
168        IconName::AiZed
169    }
170
171    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
172        let mut models = BTreeMap::default();
173
174        if cx.is_staff() {
175            for model in anthropic::Model::iter() {
176                if !matches!(model, anthropic::Model::Custom { .. }) {
177                    models.insert(model.id().to_string(), CloudModel::Anthropic(model));
178                }
179            }
180            for model in open_ai::Model::iter() {
181                if !matches!(model, open_ai::Model::Custom { .. }) {
182                    models.insert(model.id().to_string(), CloudModel::OpenAi(model));
183                }
184            }
185            for model in google_ai::Model::iter() {
186                if !matches!(model, google_ai::Model::Custom { .. }) {
187                    models.insert(model.id().to_string(), CloudModel::Google(model));
188                }
189            }
190            for model in ZedModel::iter() {
191                models.insert(model.id().to_string(), CloudModel::Zed(model));
192            }
193
194            // Override with available models from settings
195            for model in &AllLanguageModelSettings::get_global(cx)
196                .zed_dot_dev
197                .available_models
198            {
199                let model = match model.provider {
200                    AvailableProvider::Anthropic => {
201                        CloudModel::Anthropic(anthropic::Model::Custom {
202                            name: model.name.clone(),
203                            max_tokens: model.max_tokens,
204                            tool_override: model.tool_override.clone(),
205                        })
206                    }
207                    AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom {
208                        name: model.name.clone(),
209                        max_tokens: model.max_tokens,
210                    }),
211                    AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom {
212                        name: model.name.clone(),
213                        max_tokens: model.max_tokens,
214                    }),
215                };
216                models.insert(model.id().to_string(), model.clone());
217            }
218        } else {
219            models.insert(
220                anthropic::Model::Claude3_5Sonnet.id().to_string(),
221                CloudModel::Anthropic(anthropic::Model::Claude3_5Sonnet),
222            );
223        }
224
225        models
226            .into_values()
227            .map(|model| {
228                Arc::new(CloudLanguageModel {
229                    id: LanguageModelId::from(model.id().to_string()),
230                    model,
231                    llm_api_token: self.llm_api_token.clone(),
232                    client: self.client.clone(),
233                    request_limiter: RateLimiter::new(4),
234                }) as Arc<dyn LanguageModel>
235            })
236            .collect()
237    }
238
239    fn is_authenticated(&self, cx: &AppContext) -> bool {
240        !self.state.read(cx).is_signed_out()
241    }
242
243    fn authenticate(&self, _cx: &mut AppContext) -> Task<Result<()>> {
244        Task::ready(Ok(()))
245    }
246
247    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
248        cx.new_view(|_cx| ConfigurationView {
249            state: self.state.clone(),
250        })
251        .into()
252    }
253
254    fn must_accept_terms(&self, cx: &AppContext) -> bool {
255        !self.state.read(cx).has_accepted_terms_of_service(cx)
256    }
257
258    fn render_accept_terms(&self, cx: &mut WindowContext) -> Option<AnyElement> {
259        let state = self.state.read(cx);
260
261        let terms = [(
262            "terms_of_service",
263            "Terms of Service",
264            "https://zed.dev/terms-of-service",
265        )]
266        .map(|(id, label, url)| {
267            Button::new(id, label)
268                .style(ButtonStyle::Subtle)
269                .icon(IconName::ExternalLink)
270                .icon_size(IconSize::XSmall)
271                .icon_color(Color::Muted)
272                .on_click(move |_, cx| cx.open_url(url))
273        });
274
275        if state.has_accepted_terms_of_service(cx) {
276            None
277        } else {
278            let disabled = state.accept_terms.is_some();
279            Some(
280                v_flex()
281                    .gap_2()
282                    .child(
283                        v_flex()
284                            .child(Label::new("Terms and Conditions").weight(FontWeight::MEDIUM))
285                            .child(
286                                Label::new(
287                                    "Please read and accept our terms and conditions to continue.",
288                                )
289                                .size(LabelSize::Small),
290                            ),
291                    )
292                    .child(v_flex().gap_1().children(terms))
293                    .child(
294                        h_flex().justify_end().child(
295                            Button::new("accept_terms", "I've read it and accept it")
296                                .disabled(disabled)
297                                .on_click({
298                                    let state = self.state.downgrade();
299                                    move |_, cx| {
300                                        state
301                                            .update(cx, |state, cx| {
302                                                state.accept_terms_of_service(cx)
303                                            })
304                                            .ok();
305                                    }
306                                }),
307                        ),
308                    )
309                    .into_any(),
310            )
311        }
312    }
313
314    fn reset_credentials(&self, _cx: &mut AppContext) -> Task<Result<()>> {
315        Task::ready(Ok(()))
316    }
317}
318
/// A single model served through Zed's hosted LLM service.
pub struct CloudLanguageModel {
    id: LanguageModelId,
    model: CloudModel,
    // Shared, lazily-fetched bearer token for the LLM HTTP API.
    llm_api_token: LlmApiToken,
    client: Arc<Client>,
    // Caps concurrent in-flight requests (constructed with a limit of 4).
    request_limiter: RateLimiter,
}
326
// Shared cache of the LLM API bearer token. `None` until first fetched;
// the RwLock lets concurrent requests reuse a cached token (see `acquire`).
#[derive(Clone, Default)]
struct LlmApiToken(Arc<RwLock<Option<String>>>);
329
330impl CloudLanguageModel {
331    async fn perform_llm_completion(
332        client: Arc<Client>,
333        llm_api_token: LlmApiToken,
334        body: PerformCompletionParams,
335    ) -> Result<Response<AsyncBody>> {
336        let http_client = &client.http_client();
337
338        let mut token = llm_api_token.acquire(&client).await?;
339        let mut did_retry = false;
340
341        let response = loop {
342            let request = http_client::Request::builder()
343                .method(Method::POST)
344                .uri(http_client.build_zed_llm_url("/completion", &[])?.as_ref())
345                .header("Content-Type", "application/json")
346                .header("Authorization", format!("Bearer {token}"))
347                .body(serde_json::to_string(&body)?.into())?;
348            let mut response = http_client.send(request).await?;
349            if response.status().is_success() {
350                break response;
351            } else if !did_retry
352                && response
353                    .headers()
354                    .get(EXPIRED_LLM_TOKEN_HEADER_NAME)
355                    .is_some()
356            {
357                did_retry = true;
358                token = llm_api_token.refresh(&client).await?;
359            } else {
360                let mut body = String::new();
361                response.body_mut().read_to_string(&mut body).await?;
362                break Err(anyhow!(
363                    "cloud language model completion failed with status {}: {body}",
364                    response.status()
365                ))?;
366            }
367        };
368
369        Ok(response)
370    }
371}
372
impl LanguageModel for CloudLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    /// Identifier reported to telemetry, namespaced under "zed.dev/".
    fn telemetry_id(&self) -> String {
        format!("zed.dev/{}", self.model.id())
    }

    fn availability(&self) -> LanguageModelAvailability {
        self.model.availability()
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    /// Counts the tokens `request` would consume, using the tokenizer
    /// appropriate to the underlying model family.
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        match self.model.clone() {
            CloudModel::Anthropic(_) => count_anthropic_tokens(request, cx),
            CloudModel::OpenAi(model) => count_open_ai_tokens(request, model, cx),
            CloudModel::Google(model) => {
                // Google token counting is delegated to the server over RPC.
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let request = google_ai::CountTokensRequest {
                    contents: request.contents,
                };
                async move {
                    let request = serde_json::to_string(&request)?;
                    let response = client
                        .request(proto::CountLanguageModelTokens {
                            provider: proto::LanguageModelProvider::Google as i32,
                            request,
                        })
                        .await?;
                    Ok(response.token_count as usize)
                }
                .boxed()
            }
            // Zed models are counted with the OpenAI 3.5-turbo tokenizer —
            // presumably an approximation; see the note in `stream_completion`.
            CloudModel::Zed(_) => {
                count_open_ai_tokens(request, open_ai::Model::ThreePointFiveTurbo, cx)
            }
        }
    }

    /// Streams completion text through the Zed LLM service.
    ///
    /// Each arm converts the generic request into the upstream provider's
    /// wire format, serializes it as the `provider_request` payload of a
    /// `PerformCompletionParams`, performs the rate-limited HTTP call, and
    /// extracts plain-text chunks from the provider-specific event stream.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        match &self.model {
            CloudModel::Anthropic(model) => {
                let request = request.into_anthropic(model.id().into());
                let client = self.client.clone();
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Anthropic,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(anthropic::extract_text_from_events(
                        response_lines(response).map_err(AnthropicError::Other),
                    ))
                });
                async move {
                    // Map Anthropic-specific stream errors back to anyhow.
                    Ok(future
                        .await?
                        .map(|result| result.map_err(|err| anyhow!(err)))
                        .boxed())
                }
                .boxed()
            }
            CloudModel::OpenAi(model) => {
                let client = self.client.clone();
                let request = request.into_open_ai(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::OpenAi,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::Google(model) => {
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Google,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(google_ai::extract_text_from_events(response_lines(
                        response,
                    )))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::Zed(model) => {
                // Zed models use the OpenAI wire format.
                let client = self.client.clone();
                let mut request = request.into_open_ai(model.id().into());
                // NOTE(review): output hard-capped at 4000 tokens here —
                // rationale not visible in this file; confirm before changing.
                request.max_tokens = Some(4000);
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Zed,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
        }
    }

    /// Forces the model to call the single tool described by `tool_name` /
    /// `tool_description` / `input_schema`, streaming the tool-call arguments
    /// back as text. Not implemented for Google models.
    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        input_schema: serde_json::Value,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        let client = self.client.clone();
        let llm_api_token = self.llm_api_token.clone();

        match &self.model {
            CloudModel::Anthropic(model) => {
                // `tool_model_id` lets a settings-provided `tool_override`
                // substitute a different model for tool calls.
                let mut request = request.into_anthropic(model.tool_model_id().into());
                // Pin tool choice to the one provided tool.
                request.tool_choice = Some(anthropic::ToolChoice::Tool {
                    name: tool_name.clone(),
                });
                request.tools = vec![anthropic::Tool {
                    name: tool_name.clone(),
                    description: tool_description,
                    input_schema,
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Anthropic,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(anthropic::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::OpenAi(model) => {
                let mut request = request.into_open_ai(model.id().into());
                // Force the model to call exactly this function.
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::OpenAi,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::Google(_) => {
                future::ready(Err(anyhow!("tool use not implemented for Google AI"))).boxed()
            }
            CloudModel::Zed(model) => {
                // All Zed models are OpenAI-based at the time of writing.
                let mut request = request.into_open_ai(model.id().into());
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Zed,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
        }
    }
}
678
679fn response_lines<T: DeserializeOwned>(
680    response: Response<AsyncBody>,
681) -> impl Stream<Item = Result<T>> {
682    futures::stream::try_unfold(
683        (String::new(), BufReader::new(response.into_body())),
684        move |(mut line, mut body)| async {
685            match body.read_line(&mut line).await {
686                Ok(0) => Ok(None),
687                Ok(_) => {
688                    let event: T = serde_json::from_str(&line)?;
689                    line.clear();
690                    Ok(Some((event, (line, body))))
691                }
692                Err(e) => Err(e.into()),
693            }
694        },
695    )
696}
697
698impl LlmApiToken {
699    async fn acquire(&self, client: &Arc<Client>) -> Result<String> {
700        let lock = self.0.upgradable_read().await;
701        if let Some(token) = lock.as_ref() {
702            Ok(token.to_string())
703        } else {
704            Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, &client).await
705        }
706    }
707
708    async fn refresh(&self, client: &Arc<Client>) -> Result<String> {
709        Self::fetch(self.0.write().await, &client).await
710    }
711
712    async fn fetch<'a>(
713        mut lock: RwLockWriteGuard<'a, Option<String>>,
714        client: &Arc<Client>,
715    ) -> Result<String> {
716        let response = client.request(proto::GetLlmToken {}).await?;
717        *lock = Some(response.token.clone());
718        Ok(response.token.clone())
719    }
720}
721
/// Settings-panel view for configuring the zed.dev provider (sign-in,
/// plan status, terms acceptance).
struct ConfigurationView {
    state: gpui::Model<State>,
}
725
impl ConfigurationView {
    /// Starts the sign-in flow on the shared state (errors are logged, not
    /// surfaced) and re-renders this view.
    fn authenticate(&mut self, cx: &mut ViewContext<Self>) {
        self.state.update(cx, |state, cx| {
            state.authenticate(cx).detach_and_log_err(cx);
        });
        cx.notify();
    }
}
734
impl Render for ConfigurationView {
    /// Renders either the connected view (plan info, upgrade/manage buttons)
    /// or a sign-in prompt, depending on the client's connection status.
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const ZED_AI_URL: &str = "https://zed.dev/ai";
        const ACCOUNT_SETTINGS_URL: &str = "https://zed.dev/account";

        let is_connected = !self.state.read(cx).is_signed_out();
        let plan = self.state.read(cx).user_store.read(cx).current_plan();
        let must_accept_terms = !self.state.read(cx).has_accepted_terms_of_service(cx);

        let is_pro = plan == Some(proto::Plan::ZedPro);

        if is_connected {
            v_flex()
                .gap_3()
                .max_w_4_5()
                // Terms warning shown above the plan description when needed.
                .when(must_accept_terms, |this| {
                    this.child(Label::new(
                        "You must accept the terms of service to use this provider.",
                    ))
                })
                .child(Label::new(
                    if is_pro {
                        "You have full access to Zed's hosted models from Anthropic, OpenAI, Google with faster speeds and higher limits through Zed Pro."
                    } else {
                        "You have basic access to models from Anthropic through the Zed AI Free plan."
                    }))
                // Pro users get a manage button; free users behind the ZedPro
                // feature flag get learn-more/upgrade; otherwise no buttons.
                .children(if is_pro {
                    Some(
                        h_flex().child(
                            Button::new("manage_settings", "Manage Subscription")
                                .style(ButtonStyle::Filled)
                                .on_click(
                                    cx.listener(|_, _, cx| cx.open_url(ACCOUNT_SETTINGS_URL)),
                                ),
                        ),
                    )
                } else if cx.has_flag::<ZedPro>() {
                    Some(
                        h_flex()
                            .gap_2()
                            .child(
                                Button::new("learn_more", "Learn more")
                                    .style(ButtonStyle::Subtle)
                                    .on_click(cx.listener(|_, _, cx| cx.open_url(ZED_AI_URL))),
                            )
                            .child(
                                Button::new("upgrade", "Upgrade")
                                    .style(ButtonStyle::Subtle)
                                    .color(Color::Accent)
                                    .on_click(
                                        cx.listener(|_, _, cx| cx.open_url(ACCOUNT_SETTINGS_URL)),
                                    ),
                            ),
                    )
                } else {
                    None
                })
        } else {
            v_flex()
                .gap_6()
                // NOTE(review): this label reads awkwardly ("Use the zed.dev
                // to access…") — likely a missing word; confirm intended copy
                // before changing this user-facing string.
                .child(Label::new("Use the zed.dev to access language models."))
                .child(
                    v_flex()
                        .gap_2()
                        .child(
                            Button::new("sign_in", "Sign in")
                                .icon_color(Color::Muted)
                                .icon(IconName::Github)
                                .icon_position(IconPosition::Start)
                                .style(ButtonStyle::Filled)
                                .full_width()
                                .on_click(cx.listener(move |this, _, cx| this.authenticate(cx))),
                        )
                        .child(
                            div().flex().w_full().items_center().child(
                                Label::new("Sign in to enable collaboration.")
                                    .color(Color::Muted)
                                    .size(LabelSize::Small),
                            ),
                        ),
                )
        }
    }
}