use super::open_ai::count_open_ai_tokens;
use crate::{
    settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelCacheConfiguration,
    LanguageModelId, LanguageModelName, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, RateLimiter, ZedModel,
};
use anthropic::AnthropicError;
use anyhow::{anyhow, Result};
use client::{Client, PerformCompletionParams, UserStore, EXPIRED_LLM_TOKEN_HEADER_NAME};
use collections::BTreeMap;
use feature_flags::{FeatureFlagAppExt, ZedPro};
use futures::{
    future::BoxFuture, stream::BoxStream, AsyncBufReadExt, FutureExt, Stream, StreamExt,
    TryStreamExt as _,
};
use gpui::{
    AnyElement, AnyView, AppContext, AsyncAppContext, FontWeight, Model, ModelContext,
    Subscription, Task,
};
use http_client::{AsyncBody, HttpClient, Method, Response};
use schemars::JsonSchema;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_json::value::RawValue;
use settings::{Settings, SettingsStore};
use smol::{
    io::{AsyncReadExt, BufReader},
    lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard},
};
use std::{future, sync::Arc};
use strum::IntoEnumIterator;
use ui::prelude::*;

use crate::{LanguageModelAvailability, LanguageModelProvider};

use super::anthropic::count_anthropic_tokens;

pub const PROVIDER_ID: &str = "zed.dev";
pub const PROVIDER_NAME: &str = "Zed";

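/// Settings for the zed.dev provider, read from the user's settings file.
/// `available_models` is only consulted for staff accounts today (see
/// `provided_models`), where it overrides the built-in model list.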
#[derive(Default, Clone, Debug, PartialEq)]
pub struct ZedDotDevSettings {
    pub available_models: Vec<AvailableModel>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum AvailableProvider {
    Anthropic,
    OpenAi,
    Google,
}

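/// A custom model entry declared in settings. `provider` selects which
/// upstream API the entry targets; the remaining fields mirror that
/// provider's custom-model options.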
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    provider: AvailableProvider,
    name: String,
    max_tokens: usize,
    tool_override: Option<String>,
    cache_configuration: Option<LanguageModelCacheConfiguration>,
}

pub struct CloudLanguageModelProvider {
    client: Arc<Client>,
    llm_api_token: LlmApiToken,
    state: gpui::Model<State>,
    _maintain_client_status: Task<()>,
}

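/// Shared, observable state for the provider: the current connection status
/// plus the user store used to check (and accept) the terms of service.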
pub struct State {
    client: Arc<Client>,
    user_store: Model<UserStore>,
    status: client::Status,
    accept_terms: Option<Task<Result<()>>>,
    _subscription: Subscription,
}

impl State {
    fn is_signed_out(&self) -> bool {
        self.status.is_signed_out()
    }

    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let client = self.client.clone();
        cx.spawn(move |this, mut cx| async move {
            client.authenticate_and_connect(true, &cx).await?;
            this.update(&mut cx, |_, cx| cx.notify())
        })
    }

    fn has_accepted_terms_of_service(&self, cx: &AppContext) -> bool {
        self.user_store
            .read(cx)
            .current_user_has_accepted_terms()
            .unwrap_or(false)
    }

    fn accept_terms_of_service(&mut self, cx: &mut ModelContext<Self>) {
        let user_store = self.user_store.clone();
        self.accept_terms = Some(cx.spawn(move |this, mut cx| async move {
            let _ = user_store
                .update(&mut cx, |store, cx| store.accept_terms_of_service(cx))?
                .await;
            this.update(&mut cx, |this, cx| {
                this.accept_terms = None;
                cx.notify()
            })
        }));
    }
}

impl CloudLanguageModelProvider {
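    /// Creates the provider and spawns a background task that mirrors the
    /// client's connection status into `State`, notifying observers on change.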
    pub fn new(user_store: Model<UserStore>, client: Arc<Client>, cx: &mut AppContext) -> Self {
        let mut status_rx = client.status();
        let status = *status_rx.borrow();

        let state = cx.new_model(|cx| State {
            client: client.clone(),
            user_store,
            status,
            accept_terms: None,
            _subscription: cx.observe_global::<SettingsStore>(|_, cx| {
                cx.notify();
            }),
        });

        let state_ref = state.downgrade();
        let maintain_client_status = cx.spawn(|mut cx| async move {
            while let Some(status) = status_rx.next().await {
                if let Some(this) = state_ref.upgrade() {
                    _ = this.update(&mut cx, |this, cx| {
                        if this.status != status {
                            this.status = status;
                            cx.notify();
                        }
                    });
                } else {
                    break;
                }
            }
        });

        Self {
            client,
            state,
            llm_api_token: LlmApiToken::default(),
            _maintain_client_status: maintain_client_status,
        }
    }
}

impl LanguageModelProviderState for CloudLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for CloudLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiZed
    }

    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

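        // Staff get every built-in model from each provider; everyone else
        // currently gets Claude 3.5 Sonnet only (see the `else` branch below).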
        if cx.is_staff() {
            for model in anthropic::Model::iter() {
                if !matches!(model, anthropic::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::Anthropic(model));
                }
            }
            for model in open_ai::Model::iter() {
                if !matches!(model, open_ai::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::OpenAi(model));
                }
            }
            for model in google_ai::Model::iter() {
                if !matches!(model, google_ai::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::Google(model));
                }
            }
            for model in ZedModel::iter() {
                models.insert(model.id().to_string(), CloudModel::Zed(model));
            }

            // Override with available models from settings
            for model in &AllLanguageModelSettings::get_global(cx)
                .zed_dot_dev
                .available_models
            {
                let model = match model.provider {
                    AvailableProvider::Anthropic => {
                        CloudModel::Anthropic(anthropic::Model::Custom {
                            name: model.name.clone(),
                            max_tokens: model.max_tokens,
                            tool_override: model.tool_override.clone(),
                            cache_configuration: model.cache_configuration.as_ref().map(|config| {
                                anthropic::AnthropicModelCacheConfiguration {
                                    max_cache_anchors: config.max_cache_anchors,
                                    should_speculate: config.should_speculate,
                                    min_total_token: config.min_total_token,
                                }
                            }),
                        })
                    }
                    AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom {
                        name: model.name.clone(),
                        max_tokens: model.max_tokens,
                    }),
                    AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom {
                        name: model.name.clone(),
                        max_tokens: model.max_tokens,
                    }),
                };
                models.insert(model.id().to_string(), model.clone());
            }
        } else {
            models.insert(
                anthropic::Model::Claude3_5Sonnet.id().to_string(),
                CloudModel::Anthropic(anthropic::Model::Claude3_5Sonnet),
            );
        }

        models
            .into_values()
            .map(|model| {
                Arc::new(CloudLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    llm_api_token: self.llm_api_token.clone(),
                    client: self.client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &AppContext) -> bool {
        !self.state.read(cx).is_signed_out()
    }

    fn authenticate(&self, _cx: &mut AppContext) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }

    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
        cx.new_view(|_cx| ConfigurationView {
            state: self.state.clone(),
        })
        .into()
    }

    fn must_accept_terms(&self, cx: &AppContext) -> bool {
        !self.state.read(cx).has_accepted_terms_of_service(cx)
    }

    fn render_accept_terms(&self, cx: &mut WindowContext) -> Option<AnyElement> {
        let state = self.state.read(cx);

        let terms = [(
            "terms_of_service",
            "Terms of Service",
            "https://zed.dev/terms-of-service",
        )]
        .map(|(id, label, url)| {
            Button::new(id, label)
                .style(ButtonStyle::Subtle)
                .icon(IconName::ExternalLink)
                .icon_size(IconSize::XSmall)
                .icon_color(Color::Muted)
                .on_click(move |_, cx| cx.open_url(url))
        });

        if state.has_accepted_terms_of_service(cx) {
            None
        } else {
            let disabled = state.accept_terms.is_some();
            Some(
                v_flex()
                    .gap_2()
                    .child(
                        v_flex()
                            .child(Label::new("Terms and Conditions").weight(FontWeight::MEDIUM))
                            .child(
                                Label::new(
                                    "Please read and accept our terms and conditions to continue.",
                                )
                                .size(LabelSize::Small),
                            ),
                    )
                    .child(v_flex().gap_1().children(terms))
                    .child(
                        h_flex().justify_end().child(
                            Button::new("accept_terms", "I've read it and accept it")
                                .disabled(disabled)
                                .on_click({
                                    let state = self.state.downgrade();
                                    move |_, cx| {
                                        state
                                            .update(cx, |state, cx| {
                                                state.accept_terms_of_service(cx)
                                            })
                                            .ok();
                                    }
                                }),
                        ),
                    )
                    .into_any(),
            )
        }
    }

    fn reset_credentials(&self, _cx: &mut AppContext) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }
}

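/// A single model exposed through the zed.dev provider. Requests are proxied
/// through Zed's LLM service rather than sent to the upstream provider
/// directly.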
pub struct CloudLanguageModel {
    id: LanguageModelId,
    model: CloudModel,
    llm_api_token: LlmApiToken,
    client: Arc<Client>,
    request_limiter: RateLimiter,
}

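/// A cached LLM-service access token, shared across models. The `RwLock`
/// lets concurrent requests reuse one token while a single writer refreshes
/// it when the service reports expiry.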
#[derive(Clone, Default)]
struct LlmApiToken(Arc<RwLock<Option<String>>>);

impl CloudLanguageModel {
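    /// Sends a completion request to the LLM service, retrying exactly once
    /// with a freshly fetched token if the service reports that the current
    /// token has expired.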
    async fn perform_llm_completion(
        client: Arc<Client>,
        llm_api_token: LlmApiToken,
        body: PerformCompletionParams,
    ) -> Result<Response<AsyncBody>> {
        let http_client = &client.http_client();

        let mut token = llm_api_token.acquire(&client).await?;
        let mut did_retry = false;

        let response = loop {
            let request = http_client::Request::builder()
                .method(Method::POST)
                .uri(http_client.build_zed_llm_url("/completion", &[])?.as_ref())
                .header("Content-Type", "application/json")
                .header("Authorization", format!("Bearer {token}"))
                .body(serde_json::to_string(&body)?.into())?;
            let mut response = http_client.send(request).await?;
            if response.status().is_success() {
                break response;
            } else if !did_retry
                && response
                    .headers()
                    .get(EXPIRED_LLM_TOKEN_HEADER_NAME)
                    .is_some()
            {
                did_retry = true;
                token = llm_api_token.refresh(&client).await?;
            } else {
                let mut body = String::new();
                response.body_mut().read_to_string(&mut body).await?;
                return Err(anyhow!(
                    "cloud language model completion failed with status {}: {body}",
                    response.status()
                ));
            }
        };

        Ok(response)
    }
}

impl LanguageModel for CloudLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("zed.dev/{}", self.model.id())
    }

    fn availability(&self) -> LanguageModelAvailability {
        self.model.availability()
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        match self.model.clone() {
            CloudModel::Anthropic(_) => count_anthropic_tokens(request, cx),
            CloudModel::OpenAi(model) => count_open_ai_tokens(request, model, cx),
            CloudModel::Google(model) => {
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let request = google_ai::CountTokensRequest {
                    contents: request.contents,
                };
                async move {
                    let request = serde_json::to_string(&request)?;
                    let response = client
                        .request(proto::CountLanguageModelTokens {
                            provider: proto::LanguageModelProvider::Google as i32,
                            request,
                        })
                        .await?;
                    Ok(response.token_count as usize)
                }
                .boxed()
            }
            CloudModel::Zed(_) => {
                count_open_ai_tokens(request, open_ai::Model::ThreePointFiveTurbo, cx)
            }
        }
    }

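    /// Streams completion text. Every model variant goes through the same
    /// proxy endpoint; the request is serialized into the provider-specific
    /// wire format and the response stream is decoded accordingly.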
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        match &self.model {
            CloudModel::Anthropic(model) => {
                let request = request.into_anthropic(model.id().into());
                let client = self.client.clone();
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Anthropic,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(anthropic::extract_text_from_events(
                        response_lines(response).map_err(AnthropicError::Other),
                    ))
                });
                async move {
                    Ok(future
                        .await?
                        .map(|result| result.map_err(|err| anyhow!(err)))
                        .boxed())
                }
                .boxed()
            }
            CloudModel::OpenAi(model) => {
                let client = self.client.clone();
                let request = request.into_open_ai(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::OpenAi,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::Google(model) => {
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Google,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(google_ai::extract_text_from_events(response_lines(
                        response,
                    )))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::Zed(model) => {
                let client = self.client.clone();
                let mut request = request.into_open_ai(model.id().into());
                request.max_tokens = Some(4000);
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Zed,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
        }
    }

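    /// Forces the model to call the given tool and streams back the tool's
    /// input arguments. Not implemented for Google models.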
    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        input_schema: serde_json::Value,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        let client = self.client.clone();
        let llm_api_token = self.llm_api_token.clone();

        match &self.model {
            CloudModel::Anthropic(model) => {
                let mut request = request.into_anthropic(model.tool_model_id().into());
                request.tool_choice = Some(anthropic::ToolChoice::Tool {
                    name: tool_name.clone(),
                });
                request.tools = vec![anthropic::Tool {
                    name: tool_name.clone(),
                    description: tool_description,
                    input_schema,
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Anthropic,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(anthropic::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::OpenAi(model) => {
                let mut request = request.into_open_ai(model.id().into());
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::OpenAi,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::Google(_) => {
                future::ready(Err(anyhow!("tool use not implemented for Google AI"))).boxed()
            }
            CloudModel::Zed(model) => {
                // All Zed models are OpenAI-based at the time of writing.
                let mut request = request.into_open_ai(model.id().into());
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Zed,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
        }
    }
}

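/// Converts a streaming response body into a stream of deserialized values,
/// one per newline-delimited JSON line.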
fn response_lines<T: DeserializeOwned>(
    response: Response<AsyncBody>,
) -> impl Stream<Item = Result<T>> {
    futures::stream::try_unfold(
        (String::new(), BufReader::new(response.into_body())),
        move |(mut line, mut body)| async {
            match body.read_line(&mut line).await {
                Ok(0) => Ok(None),
                Ok(_) => {
                    let event: T = serde_json::from_str(&line)?;
                    line.clear();
                    Ok(Some((event, (line, body))))
                }
                Err(e) => Err(e.into()),
            }
        },
    )
}

impl LlmApiToken {
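    /// Returns the cached token, fetching one from the server on first use.
    /// Uses an upgradable read lock so the common path takes no write lock.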
    async fn acquire(&self, client: &Arc<Client>) -> Result<String> {
        let lock = self.0.upgradable_read().await;
        if let Some(token) = lock.as_ref() {
            Ok(token.to_string())
        } else {
            Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, client).await
        }
    }

    async fn refresh(&self, client: &Arc<Client>) -> Result<String> {
        Self::fetch(self.0.write().await, client).await
    }

    async fn fetch<'a>(
        mut lock: RwLockWriteGuard<'a, Option<String>>,
        client: &Arc<Client>,
    ) -> Result<String> {
        let response = client.request(proto::GetLlmToken {}).await?;
        *lock = Some(response.token.clone());
        Ok(response.token)
    }
}

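/// The provider's configuration UI: prompts the user to sign in, shows their
/// current plan, and offers upgrade and subscription-management links.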
struct ConfigurationView {
    state: gpui::Model<State>,
}

impl ConfigurationView {
    fn authenticate(&mut self, cx: &mut ViewContext<Self>) {
        self.state.update(cx, |state, cx| {
            state.authenticate(cx).detach_and_log_err(cx);
        });
        cx.notify();
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const ZED_AI_URL: &str = "https://zed.dev/ai";
        const ACCOUNT_SETTINGS_URL: &str = "https://zed.dev/account";

        let is_connected = !self.state.read(cx).is_signed_out();
        let plan = self.state.read(cx).user_store.read(cx).current_plan();
        let must_accept_terms = !self.state.read(cx).has_accepted_terms_of_service(cx);

        let is_pro = plan == Some(proto::Plan::ZedPro);

        if is_connected {
            v_flex()
                .gap_3()
                .max_w_4_5()
                .when(must_accept_terms, |this| {
                    this.child(Label::new(
                        "You must accept the terms of service to use this provider.",
                    ))
                })
                .child(Label::new(
                    if is_pro {
765 "You have full access to Zed's hosted models from Anthropic, OpenAI, Google with faster speeds and higher limits through Zed Pro."
766 } else {
767 "You have basic access to models from Anthropic through the Zed AI Free plan."
768 }))
                .children(if is_pro {
                    Some(
                        h_flex().child(
                            Button::new("manage_settings", "Manage Subscription")
                                .style(ButtonStyle::Filled)
                                .on_click(
                                    cx.listener(|_, _, cx| cx.open_url(ACCOUNT_SETTINGS_URL)),
                                ),
                        ),
                    )
                } else if cx.has_flag::<ZedPro>() {
                    Some(
                        h_flex()
                            .gap_2()
                            .child(
                                Button::new("learn_more", "Learn more")
                                    .style(ButtonStyle::Subtle)
                                    .on_click(cx.listener(|_, _, cx| cx.open_url(ZED_AI_URL))),
                            )
                            .child(
                                Button::new("upgrade", "Upgrade")
                                    .style(ButtonStyle::Subtle)
                                    .color(Color::Accent)
                                    .on_click(
                                        cx.listener(|_, _, cx| cx.open_url(ACCOUNT_SETTINGS_URL)),
                                    ),
                            ),
                    )
                } else {
                    None
                })
        } else {
            v_flex()
                .gap_6()
                .child(Label::new("Use zed.dev to access language models."))
                .child(
                    v_flex()
                        .gap_2()
                        .child(
                            Button::new("sign_in", "Sign in")
                                .icon_color(Color::Muted)
                                .icon(IconName::Github)
                                .icon_position(IconPosition::Start)
                                .style(ButtonStyle::Filled)
                                .full_width()
                                .on_click(cx.listener(move |this, _, cx| this.authenticate(cx))),
                        )
                        .child(
                            div().flex().w_full().items_center().child(
                                Label::new("Sign in to enable collaboration.")
                                    .color(Color::Muted)
                                    .size(LabelSize::Small),
                            ),
                        ),
                )
        }
    }
}