use super::open_ai::count_open_ai_tokens;
use crate::{
    settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelCacheConfiguration,
    LanguageModelId, LanguageModelName, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, RateLimiter, ZedModel,
};
use anthropic::AnthropicError;
use anyhow::{anyhow, Result};
use client::{Client, PerformCompletionParams, UserStore, EXPIRED_LLM_TOKEN_HEADER_NAME};
use collections::BTreeMap;
use feature_flags::{FeatureFlagAppExt, ZedPro};
use futures::{
    future::BoxFuture, stream::BoxStream, AsyncBufReadExt, FutureExt, Stream, StreamExt,
    TryStreamExt as _,
};
use gpui::{
    AnyElement, AnyView, AppContext, AsyncAppContext, FontWeight, Model, ModelContext,
    Subscription, Task,
};
use http_client::{AsyncBody, HttpClient, Method, Response};
use schemars::JsonSchema;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_json::value::RawValue;
use settings::{Settings, SettingsStore};
use smol::{
    io::{AsyncReadExt, BufReader},
    lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard},
};
use std::{future, sync::Arc};
use strum::IntoEnumIterator;
use ui::prelude::*;

use crate::{LanguageModelAvailability, LanguageModelProvider};

use super::anthropic::count_anthropic_tokens;

pub const PROVIDER_ID: &str = "zed.dev";
pub const PROVIDER_NAME: &str = "Zed";

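/// Settings for the zed.dev provider; currently just the custom models to
/// expose in addition to the built-in ones.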
#[derive(Default, Clone, Debug, PartialEq)]
pub struct ZedDotDevSettings {
    pub available_models: Vec<AvailableModel>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum AvailableProvider {
    Anthropic,
    OpenAi,
    Google,
}

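/// A custom model declared in the user's settings. `provider` selects the
/// upstream service; the optional fields override provider defaults (tool
/// model, prompt caching, output token limit) where the provider supports
/// them.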
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    provider: AvailableProvider,
    name: String,
    max_tokens: usize,
    tool_override: Option<String>,
    cache_configuration: Option<LanguageModelCacheConfiguration>,
    max_output_tokens: Option<u32>,
}

pub struct CloudLanguageModelProvider {
    client: Arc<Client>,
    llm_api_token: LlmApiToken,
    state: gpui::Model<State>,
    _maintain_client_status: Task<()>,
}

pub struct State {
    client: Arc<Client>,
    user_store: Model<UserStore>,
    status: client::Status,
    accept_terms: Option<Task<Result<()>>>,
    _subscription: Subscription,
}

impl State {
    fn is_signed_out(&self) -> bool {
        self.status.is_signed_out()
    }

    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let client = self.client.clone();
        cx.spawn(move |this, mut cx| async move {
            client.authenticate_and_connect(true, &cx).await?;
            this.update(&mut cx, |_, cx| cx.notify())
        })
    }

    fn has_accepted_terms_of_service(&self, cx: &AppContext) -> bool {
        self.user_store
            .read(cx)
            .current_user_has_accepted_terms()
            .unwrap_or(false)
    }

    fn accept_terms_of_service(&mut self, cx: &mut ModelContext<Self>) {
        let user_store = self.user_store.clone();
        self.accept_terms = Some(cx.spawn(move |this, mut cx| async move {
            let _ = user_store
                .update(&mut cx, |store, cx| store.accept_terms_of_service(cx))?
                .await;
            this.update(&mut cx, |this, cx| {
                this.accept_terms = None;
                cx.notify()
            })
        }));
    }
}

impl CloudLanguageModelProvider {
    pub fn new(user_store: Model<UserStore>, client: Arc<Client>, cx: &mut AppContext) -> Self {
        let mut status_rx = client.status();
        let status = *status_rx.borrow();

        let state = cx.new_model(|cx| State {
            client: client.clone(),
            user_store,
            status,
            accept_terms: None,
            _subscription: cx.observe_global::<SettingsStore>(|_, cx| {
                cx.notify();
            }),
        });

        let state_ref = state.downgrade();
        let maintain_client_status = cx.spawn(|mut cx| async move {
            while let Some(status) = status_rx.next().await {
                if let Some(this) = state_ref.upgrade() {
                    _ = this.update(&mut cx, |this, cx| {
                        if this.status != status {
                            this.status = status;
                            cx.notify();
                        }
                    });
                } else {
                    break;
                }
            }
        });

        Self {
            client,
            state,
            llm_api_token: LlmApiToken::default(),
            _maintain_client_status: maintain_client_status,
        }
    }
}

impl LanguageModelProviderState for CloudLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for CloudLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiZed
    }

    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

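        // Staff accounts see the full built-in catalogs (plus any custom
        // models from settings); everyone else currently gets Claude 3.5
        // Sonnet only.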
        if cx.is_staff() {
            for model in anthropic::Model::iter() {
                if !matches!(model, anthropic::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::Anthropic(model));
                }
            }
            for model in open_ai::Model::iter() {
                if !matches!(model, open_ai::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::OpenAi(model));
                }
            }
            for model in google_ai::Model::iter() {
                if !matches!(model, google_ai::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::Google(model));
                }
            }
            for model in ZedModel::iter() {
                models.insert(model.id().to_string(), CloudModel::Zed(model));
            }

            // Override with available models from settings
            for model in &AllLanguageModelSettings::get_global(cx)
                .zed_dot_dev
                .available_models
            {
                let model = match model.provider {
                    AvailableProvider::Anthropic => {
                        CloudModel::Anthropic(anthropic::Model::Custom {
                            name: model.name.clone(),
                            max_tokens: model.max_tokens,
                            tool_override: model.tool_override.clone(),
                            cache_configuration: model.cache_configuration.as_ref().map(|config| {
                                anthropic::AnthropicModelCacheConfiguration {
                                    max_cache_anchors: config.max_cache_anchors,
                                    should_speculate: config.should_speculate,
                                    min_total_token: config.min_total_token,
                                }
                            }),
                            max_output_tokens: model.max_output_tokens,
                        })
                    }
                    AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom {
                        name: model.name.clone(),
                        max_tokens: model.max_tokens,
                    }),
                    AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom {
                        name: model.name.clone(),
                        max_tokens: model.max_tokens,
                    }),
                };
                models.insert(model.id().to_string(), model.clone());
            }
        } else {
            models.insert(
                anthropic::Model::Claude3_5Sonnet.id().to_string(),
                CloudModel::Anthropic(anthropic::Model::Claude3_5Sonnet),
            );
        }

        models
            .into_values()
            .map(|model| {
                Arc::new(CloudLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    llm_api_token: self.llm_api_token.clone(),
                    client: self.client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &AppContext) -> bool {
        !self.state.read(cx).is_signed_out()
    }

    fn authenticate(&self, _cx: &mut AppContext) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }

    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
        cx.new_view(|_cx| ConfigurationView {
            state: self.state.clone(),
        })
        .into()
    }

    fn must_accept_terms(&self, cx: &AppContext) -> bool {
        !self.state.read(cx).has_accepted_terms_of_service(cx)
    }

    fn render_accept_terms(&self, cx: &mut WindowContext) -> Option<AnyElement> {
        let state = self.state.read(cx);

        let terms = [(
            "terms_of_service",
            "Terms of Service",
            "https://zed.dev/terms-of-service",
        )]
        .map(|(id, label, url)| {
            Button::new(id, label)
                .style(ButtonStyle::Subtle)
                .icon(IconName::ExternalLink)
                .icon_size(IconSize::XSmall)
                .icon_color(Color::Muted)
                .on_click(move |_, cx| cx.open_url(url))
        });

        if state.has_accepted_terms_of_service(cx) {
            None
        } else {
            let disabled = state.accept_terms.is_some();
            Some(
                v_flex()
                    .gap_2()
                    .child(
                        v_flex()
                            .child(Label::new("Terms and Conditions").weight(FontWeight::MEDIUM))
                            .child(
                                Label::new(
                                    "Please read and accept our terms and conditions to continue.",
                                )
                                .size(LabelSize::Small),
                            ),
                    )
                    .child(v_flex().gap_1().children(terms))
                    .child(
                        h_flex().justify_end().child(
305 Button::new("accept_terms", "I've read it and accept it")
                                .disabled(disabled)
                                .on_click({
                                    let state = self.state.downgrade();
                                    move |_, cx| {
                                        state
                                            .update(cx, |state, cx| {
                                                state.accept_terms_of_service(cx)
                                            })
                                            .ok();
                                    }
                                }),
                        ),
                    )
                    .into_any(),
            )
        }
    }

    fn reset_credentials(&self, _cx: &mut AppContext) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }
}

pub struct CloudLanguageModel {
    id: LanguageModelId,
    model: CloudModel,
    llm_api_token: LlmApiToken,
    client: Arc<Client>,
    request_limiter: RateLimiter,
}

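/// A lazily fetched bearer token for the zed.dev LLM service, shared across
/// models via `Arc` and replaced when the server reports it expired.
///
/// A minimal usage sketch (hypothetical call site, assuming a connected
/// `client`):
///
/// ```ignore
/// let token = llm_api_token.acquire(&client).await?; // fetched on first use
/// // ... send a request with `Authorization: Bearer {token}`; if the response
/// // carries EXPIRED_LLM_TOKEN_HEADER_NAME, refresh and retry once:
/// let token = llm_api_token.refresh(&client).await?;
/// ```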
#[derive(Clone, Default)]
struct LlmApiToken(Arc<RwLock<Option<String>>>);

impl CloudLanguageModel {
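    /// Sends a completion request to the zed.dev LLM endpoint with the cached
    /// token. If the server responds with `EXPIRED_LLM_TOKEN_HEADER_NAME`, the
    /// token is refreshed and the request retried exactly once; any other
    /// non-success status becomes an error carrying the response body.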
    async fn perform_llm_completion(
        client: Arc<Client>,
        llm_api_token: LlmApiToken,
        body: PerformCompletionParams,
    ) -> Result<Response<AsyncBody>> {
        let http_client = &client.http_client();

        let mut token = llm_api_token.acquire(&client).await?;
        let mut did_retry = false;

        let response = loop {
            let request = http_client::Request::builder()
                .method(Method::POST)
                .uri(http_client.build_zed_llm_url("/completion", &[])?.as_ref())
                .header("Content-Type", "application/json")
                .header("Authorization", format!("Bearer {token}"))
                .body(serde_json::to_string(&body)?.into())?;
            let mut response = http_client.send(request).await?;
            if response.status().is_success() {
                break response;
            } else if !did_retry
                && response
                    .headers()
                    .get(EXPIRED_LLM_TOKEN_HEADER_NAME)
                    .is_some()
            {
                did_retry = true;
                token = llm_api_token.refresh(&client).await?;
            } else {
                let mut body = String::new();
                response.body_mut().read_to_string(&mut body).await?;
                break Err(anyhow!(
                    "cloud language model completion failed with status {}: {body}",
                    response.status()
                ))?;
            }
        };

        Ok(response)
    }
}

impl LanguageModel for CloudLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("zed.dev/{}", self.model.id())
    }

    fn availability(&self) -> LanguageModelAvailability {
        self.model.availability()
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        match self.model.clone() {
            CloudModel::Anthropic(_) => count_anthropic_tokens(request, cx),
            CloudModel::OpenAi(model) => count_open_ai_tokens(request, model, cx),
            CloudModel::Google(model) => {
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let request = google_ai::CountTokensRequest {
                    contents: request.contents,
                };
                async move {
                    let request = serde_json::to_string(&request)?;
                    let response = client
                        .request(proto::CountLanguageModelTokens {
                            provider: proto::LanguageModelProvider::Google as i32,
                            request,
                        })
                        .await?;
                    Ok(response.token_count as usize)
                }
                .boxed()
            }
            CloudModel::Zed(_) => {
                count_open_ai_tokens(request, open_ai::Model::ThreePointFiveTurbo, cx)
            }
        }
    }

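    /// Streams plain-text deltas for the request, dispatching on the wrapped
    /// provider. Each arm serializes the provider-specific request into
    /// `PerformCompletionParams`, routes it through the zed.dev proxy, and
    /// extracts text from the resulting event stream.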
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        match &self.model {
            CloudModel::Anthropic(model) => {
                let request = request.into_anthropic(model.id().into(), model.max_output_tokens());
                let client = self.client.clone();
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Anthropic,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(anthropic::extract_text_from_events(
                        response_lines(response).map_err(AnthropicError::Other),
                    ))
                });
                async move {
                    Ok(future
                        .await?
                        .map(|result| result.map_err(|err| anyhow!(err)))
                        .boxed())
                }
                .boxed()
            }
            CloudModel::OpenAi(model) => {
                let client = self.client.clone();
                let request = request.into_open_ai(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::OpenAi,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::Google(model) => {
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Google,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(google_ai::extract_text_from_events(response_lines(
                        response,
                    )))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::Zed(model) => {
                let client = self.client.clone();
                let mut request = request.into_open_ai(model.id().into());
                request.max_tokens = Some(4000);
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Zed,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
        }
    }

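    /// Forces the model to invoke a single named tool and streams the tool's
    /// input arguments as they arrive. Anthropic models use
    /// `anthropic::ToolChoice::Tool`; OpenAI-based models (including the
    /// Zed-hosted ones) pin the choice to one function definition. Tool use
    /// is not implemented for Google models.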
    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        input_schema: serde_json::Value,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        let client = self.client.clone();
        let llm_api_token = self.llm_api_token.clone();

        match &self.model {
            CloudModel::Anthropic(model) => {
                let mut request =
                    request.into_anthropic(model.tool_model_id().into(), model.max_output_tokens());
                request.tool_choice = Some(anthropic::ToolChoice::Tool {
                    name: tool_name.clone(),
                });
                request.tools = vec![anthropic::Tool {
                    name: tool_name.clone(),
                    description: tool_description,
                    input_schema,
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Anthropic,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(anthropic::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::OpenAi(model) => {
                let mut request = request.into_open_ai(model.id().into());
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::OpenAi,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::Google(_) => {
                future::ready(Err(anyhow!("tool use not implemented for Google AI"))).boxed()
            }
            CloudModel::Zed(model) => {
                // All Zed models are OpenAI-based at the time of writing.
                let mut request = request.into_open_ai(model.id().into());
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Zed,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
        }
    }
}

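/// Deserializes a streaming response body in which every line is one complete
/// JSON-encoded event (newline-delimited JSON), yielding events until EOF.
///
/// A consumption sketch (assuming `StreamExt` is in scope, as it is here):
///
/// ```ignore
/// let mut events = Box::pin(response_lines::<serde_json::Value>(response));
/// while let Some(event) = events.next().await {
///     let event = event?;
///     // handle `event`...
/// }
/// ```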
fn response_lines<T: DeserializeOwned>(
    response: Response<AsyncBody>,
) -> impl Stream<Item = Result<T>> {
    futures::stream::try_unfold(
        (String::new(), BufReader::new(response.into_body())),
        move |(mut line, mut body)| async {
            match body.read_line(&mut line).await {
                Ok(0) => Ok(None),
                Ok(_) => {
                    let event: T = serde_json::from_str(&line)?;
                    line.clear();
                    Ok(Some((event, (line, body))))
                }
                Err(e) => Err(e.into()),
            }
        },
    )
}

impl LlmApiToken {
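    /// Returns the cached token, fetching one from the server on first use.
    /// Takes an upgradable read lock so concurrent callers don't race to
    /// fetch duplicate tokens.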
    async fn acquire(&self, client: &Arc<Client>) -> Result<String> {
        let lock = self.0.upgradable_read().await;
        if let Some(token) = lock.as_ref() {
            Ok(token.to_string())
        } else {
            Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, client).await
        }
    }

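    /// Discards any cached token and fetches a fresh one. Called after the
    /// server reports the current token as expired.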
    async fn refresh(&self, client: &Arc<Client>) -> Result<String> {
        Self::fetch(self.0.write().await, client).await
    }

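    /// Requests a token over RPC and stores it in the cache behind the held
    /// write lock before returning it.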
    async fn fetch<'a>(
        mut lock: RwLockWriteGuard<'a, Option<String>>,
        client: &Arc<Client>,
    ) -> Result<String> {
        let response = client.request(proto::GetLlmToken {}).await?;
        *lock = Some(response.token.clone());
        Ok(response.token)
    }
}

struct ConfigurationView {
    state: gpui::Model<State>,
}

impl ConfigurationView {
    fn authenticate(&mut self, cx: &mut ViewContext<Self>) {
        self.state.update(cx, |state, cx| {
            state.authenticate(cx).detach_and_log_err(cx);
        });
        cx.notify();
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const ZED_AI_URL: &str = "https://zed.dev/ai";
        const ACCOUNT_SETTINGS_URL: &str = "https://zed.dev/account";

        let is_connected = !self.state.read(cx).is_signed_out();
        let plan = self.state.read(cx).user_store.read(cx).current_plan();
        let must_accept_terms = !self.state.read(cx).has_accepted_terms_of_service(cx);

        let is_pro = plan == Some(proto::Plan::ZedPro);

        if is_connected {
            v_flex()
                .gap_3()
                .max_w_4_5()
                .when(must_accept_terms, |this| {
                    this.child(Label::new(
                        "You must accept the terms of service to use this provider.",
                    ))
                })
                .child(Label::new(if is_pro {
                    "You have full access to Zed's hosted models from Anthropic, OpenAI, and Google, with faster speeds and higher limits through Zed Pro."
                } else {
                    "You have basic access to models from Anthropic through the Zed AI Free plan."
                }))
                .children(if is_pro {
                    Some(
                        h_flex().child(
                            Button::new("manage_settings", "Manage Subscription")
                                .style(ButtonStyle::Filled)
                                .on_click(
                                    cx.listener(|_, _, cx| cx.open_url(ACCOUNT_SETTINGS_URL)),
                                ),
                        ),
                    )
                } else if cx.has_flag::<ZedPro>() {
                    Some(
                        h_flex()
                            .gap_2()
                            .child(
                                Button::new("learn_more", "Learn more")
                                    .style(ButtonStyle::Subtle)
                                    .on_click(cx.listener(|_, _, cx| cx.open_url(ZED_AI_URL))),
                            )
                            .child(
                                Button::new("upgrade", "Upgrade")
                                    .style(ButtonStyle::Subtle)
                                    .color(Color::Accent)
                                    .on_click(
                                        cx.listener(|_, _, cx| cx.open_url(ACCOUNT_SETTINGS_URL)),
                                    ),
                            ),
                    )
                } else {
                    None
                })
        } else {
            v_flex()
                .gap_6()
806 .child(Label::new("Use the zed.dev to access language models."))
                .child(
                    v_flex()
                        .gap_2()
                        .child(
                            Button::new("sign_in", "Sign in")
                                .icon_color(Color::Muted)
                                .icon(IconName::Github)
                                .icon_position(IconPosition::Start)
                                .style(ButtonStyle::Filled)
                                .full_width()
                                .on_click(cx.listener(move |this, _, cx| this.authenticate(cx))),
                        )
                        .child(
                            div().flex().w_full().items_center().child(
                                Label::new("Sign in to enable collaboration.")
                                    .color(Color::Muted)
                                    .size(LabelSize::Small),
                            ),
                        ),
                )
        }
    }
}