1use super::open_ai::count_open_ai_tokens;
2use crate::{
3 settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelCacheConfiguration,
4 LanguageModelId, LanguageModelName, LanguageModelProviderId, LanguageModelProviderName,
5 LanguageModelProviderState, LanguageModelRequest, RateLimiter, ZedModel,
6};
7use anthropic::AnthropicError;
8use anyhow::{anyhow, Result};
9use client::{Client, PerformCompletionParams, UserStore, EXPIRED_LLM_TOKEN_HEADER_NAME};
10use collections::BTreeMap;
11use feature_flags::{FeatureFlagAppExt, ZedPro};
12use futures::{
13 future::BoxFuture, stream::BoxStream, AsyncBufReadExt, FutureExt, Stream, StreamExt,
14 TryStreamExt as _,
15};
16use gpui::{
17 AnyElement, AnyView, AppContext, AsyncAppContext, FontWeight, Model, ModelContext,
18 Subscription, Task,
19};
20use http_client::{AsyncBody, HttpClient, Method, Response};
21use schemars::JsonSchema;
22use serde::{de::DeserializeOwned, Deserialize, Serialize};
23use serde_json::value::RawValue;
24use settings::{Settings, SettingsStore};
25use smol::{
26 io::{AsyncReadExt, BufReader},
27 lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard},
28};
29use std::{future, sync::Arc};
30use strum::IntoEnumIterator;
31use ui::prelude::*;
32
33use crate::{LanguageModelAvailability, LanguageModelProvider};
34
35use super::anthropic::count_anthropic_tokens;
36
/// Stable identifier for the zed.dev language model provider.
pub const PROVIDER_ID: &str = "zed.dev";
/// Human-readable provider name shown in the UI.
pub const PROVIDER_NAME: &str = "Zed";
39
/// User-configurable settings for the zed.dev provider, read from the
/// global `AllLanguageModelSettings`.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct ZedDotDevSettings {
    /// Custom models to expose through zed.dev in addition to the built-ins.
    pub available_models: Vec<AvailableModel>,
}
44
/// The upstream service that actually hosts a custom model configured
/// through the zed.dev provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum AvailableProvider {
    Anthropic,
    OpenAi,
    Google,
}
52
/// A custom model definition supplied via settings; converted into the
/// matching `CloudModel::*::Custom` variant in `provided_models`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// The provider of the language model.
    pub provider: AvailableProvider,
    /// The model's name in the provider's API. e.g. claude-3-5-sonnet-20240620
    pub name: String,
    /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
    pub display_name: Option<String>,
    /// The size of the context window, indicating the maximum number of tokens the model can process.
    pub max_tokens: usize,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u32>,
    /// Override this model with a different Anthropic model for tool calls.
    pub tool_override: Option<String>,
    /// Indicates whether this custom model supports caching.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
}
70
/// Language model provider backed by zed.dev's hosted LLM service.
pub struct CloudLanguageModelProvider {
    client: Arc<Client>,
    // Shared, lazily-fetched token for the LLM API; cloned into each model.
    llm_api_token: LlmApiToken,
    state: gpui::Model<State>,
    // Background task keeping `state.status` in sync with the client;
    // held only so it is dropped (cancelled) with the provider.
    _maintain_client_status: Task<()>,
}
77
/// Observable provider state: sign-in status and terms-of-service acceptance.
pub struct State {
    client: Arc<Client>,
    user_store: Model<UserStore>,
    status: client::Status,
    // In-flight terms-of-service acceptance request, if any; presence is used
    // by the UI to disable the accept button while the request is pending.
    accept_terms: Option<Task<Result<()>>>,
    // Keeps the settings-store observation alive for re-rendering on changes.
    _subscription: Subscription,
}
85
impl State {
    /// Whether there is currently no signed-in session with the Zed server.
    fn is_signed_out(&self) -> bool {
        self.status.is_signed_out()
    }

    /// Starts the sign-in flow and notifies observers once it completes.
    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let client = self.client.clone();
        cx.spawn(move |this, mut cx| async move {
            client.authenticate_and_connect(true, &cx).await?;
            // Re-render anything observing this state now that sign-in finished.
            this.update(&mut cx, |_, cx| cx.notify())
        })
    }

    /// Whether the current user has accepted the terms of service.
    /// An unknown answer (e.g. no user loaded yet) is treated as "not accepted".
    fn has_accepted_terms_of_service(&self, cx: &AppContext) -> bool {
        self.user_store
            .read(cx)
            .current_user_has_accepted_terms()
            .unwrap_or(false)
    }

    /// Sends the terms-of-service acceptance to the server. The in-flight task
    /// is stored in `accept_terms` and cleared (with a notify) on completion.
    fn accept_terms_of_service(&mut self, cx: &mut ModelContext<Self>) {
        let user_store = self.user_store.clone();
        self.accept_terms = Some(cx.spawn(move |this, mut cx| async move {
            // Result intentionally ignored: failure leaves the terms un-accepted.
            let _ = user_store
                .update(&mut cx, |store, cx| store.accept_terms_of_service(cx))?
                .await;
            this.update(&mut cx, |this, cx| {
                this.accept_terms = None;
                cx.notify()
            })
        }));
    }
}
119
impl CloudLanguageModelProvider {
    /// Creates the provider and spawns a background task that mirrors the
    /// client's connection status into the provider's observable `State`.
    pub fn new(user_store: Model<UserStore>, client: Arc<Client>, cx: &mut AppContext) -> Self {
        let mut status_rx = client.status();
        let status = *status_rx.borrow();

        let state = cx.new_model(|cx| State {
            client: client.clone(),
            user_store,
            status,
            accept_terms: None,
            // Re-render when global settings change (e.g. available models).
            _subscription: cx.observe_global::<SettingsStore>(|_, cx| {
                cx.notify();
            }),
        });

        // Hold only a weak handle so this task doesn't keep the state alive.
        let state_ref = state.downgrade();
        let maintain_client_status = cx.spawn(|mut cx| async move {
            while let Some(status) = status_rx.next().await {
                if let Some(this) = state_ref.upgrade() {
                    _ = this.update(&mut cx, |this, cx| {
                        // Only notify on actual changes to avoid redundant renders.
                        if this.status != status {
                            this.status = status;
                            cx.notify();
                        }
                    });
                } else {
                    // State was dropped; stop watching.
                    break;
                }
            }
        });

        Self {
            client,
            state,
            llm_api_token: LlmApiToken::default(),
            _maintain_client_status: maintain_client_status,
        }
    }
}
159
impl LanguageModelProviderState for CloudLanguageModelProvider {
    type ObservableEntity = State;

    /// Exposes the provider's state so the UI can observe and re-render on changes.
    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
167
impl LanguageModelProvider for CloudLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiZed
    }

    /// Lists the models offered through zed.dev.
    ///
    /// Staff users get every non-custom built-in model from Anthropic, OpenAI,
    /// Google, and Zed, with custom models from settings overriding built-ins
    /// that share an id; other users currently get only Claude 3.5 Sonnet.
    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
        // Keyed by model id: deduplicates and yields a stable, sorted order.
        let mut models = BTreeMap::default();

        if cx.is_staff() {
            for model in anthropic::Model::iter() {
                if !matches!(model, anthropic::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::Anthropic(model));
                }
            }
            for model in open_ai::Model::iter() {
                if !matches!(model, open_ai::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::OpenAi(model));
                }
            }
            for model in google_ai::Model::iter() {
                if !matches!(model, google_ai::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::Google(model));
                }
            }
            for model in ZedModel::iter() {
                models.insert(model.id().to_string(), CloudModel::Zed(model));
            }

            // Override with available models from settings
            for model in &AllLanguageModelSettings::get_global(cx)
                .zed_dot_dev
                .available_models
            {
                let model = match model.provider {
                    AvailableProvider::Anthropic => {
                        CloudModel::Anthropic(anthropic::Model::Custom {
                            name: model.name.clone(),
                            display_name: model.display_name.clone(),
                            max_tokens: model.max_tokens,
                            tool_override: model.tool_override.clone(),
                            cache_configuration: model.cache_configuration.as_ref().map(|config| {
                                anthropic::AnthropicModelCacheConfiguration {
                                    max_cache_anchors: config.max_cache_anchors,
                                    should_speculate: config.should_speculate,
                                    min_total_token: config.min_total_token,
                                }
                            }),
                            max_output_tokens: model.max_output_tokens,
                        })
                    }
                    AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom {
                        name: model.name.clone(),
                        max_tokens: model.max_tokens,
                    }),
                    AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom {
                        name: model.name.clone(),
                        max_tokens: model.max_tokens,
                    }),
                };
                models.insert(model.id().to_string(), model.clone());
            }
        } else {
            models.insert(
                anthropic::Model::Claude3_5Sonnet.id().to_string(),
                CloudModel::Anthropic(anthropic::Model::Claude3_5Sonnet),
            );
        }

        // Wrap each model with the shared client, token, and a per-model
        // rate limiter allowing up to 4 concurrent requests.
        models
            .into_values()
            .map(|model| {
                Arc::new(CloudLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    llm_api_token: self.llm_api_token.clone(),
                    client: self.client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &AppContext) -> bool {
        !self.state.read(cx).is_signed_out()
    }

    /// No-op: sign-in is driven through the configuration view instead.
    fn authenticate(&self, _cx: &mut AppContext) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }

    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
        cx.new_view(|_cx| ConfigurationView {
            state: self.state.clone(),
        })
        .into()
    }

    fn must_accept_terms(&self, cx: &AppContext) -> bool {
        !self.state.read(cx).has_accepted_terms_of_service(cx)
    }

    /// Renders the terms-of-service prompt, or `None` once the user has
    /// already accepted. The accept button is disabled while a previous
    /// acceptance request is still in flight.
    fn render_accept_terms(&self, cx: &mut WindowContext) -> Option<AnyElement> {
        let state = self.state.read(cx);

        let terms = [(
            "terms_of_service",
            "Terms of Service",
            "https://zed.dev/terms-of-service",
        )]
        .map(|(id, label, url)| {
            Button::new(id, label)
                .style(ButtonStyle::Subtle)
                .icon(IconName::ExternalLink)
                .icon_size(IconSize::XSmall)
                .icon_color(Color::Muted)
                .on_click(move |_, cx| cx.open_url(url))
        });

        if state.has_accepted_terms_of_service(cx) {
            None
        } else {
            let disabled = state.accept_terms.is_some();
            Some(
                v_flex()
                    .gap_2()
                    .child(
                        v_flex()
                            .child(Label::new("Terms and Conditions").weight(FontWeight::MEDIUM))
                            .child(
                                Label::new(
                                    "Please read and accept our terms and conditions to continue.",
                                )
                                .size(LabelSize::Small),
                            ),
                    )
                    .child(v_flex().gap_1().children(terms))
                    .child(
                        h_flex().justify_end().child(
                            Button::new("accept_terms", "I've read it and accept it")
                                .disabled(disabled)
                                .on_click({
                                    // Weak handle: the button may outlive the state.
                                    let state = self.state.downgrade();
                                    move |_, cx| {
                                        state
                                            .update(cx, |state, cx| {
                                                state.accept_terms_of_service(cx)
                                            })
                                            .ok();
                                    }
                                }),
                        ),
                    )
                    .into_any(),
            )
        }
    }

    /// No-op: there are no locally stored credentials to reset for zed.dev.
    fn reset_credentials(&self, _cx: &mut AppContext) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }
}
337
/// A single model served through the zed.dev LLM API.
pub struct CloudLanguageModel {
    id: LanguageModelId,
    model: CloudModel,
    // Shared bearer token for the LLM API (refreshed on expiry).
    llm_api_token: LlmApiToken,
    client: Arc<Client>,
    // Bounds the number of concurrent requests for this model.
    request_limiter: RateLimiter,
}
345
/// Shared, lazily-fetched bearer token for the zed.dev LLM API.
/// Cloning is cheap: all clones share the same lock-guarded slot.
#[derive(Clone, Default)]
struct LlmApiToken(Arc<RwLock<Option<String>>>);
348
impl CloudLanguageModel {
    /// Sends a completion request to the zed.dev LLM API and returns the raw
    /// streaming response.
    ///
    /// If the server indicates (via `EXPIRED_LLM_TOKEN_HEADER_NAME`) that the
    /// bearer token has expired, the token is refreshed and the request is
    /// retried exactly once. Any other non-success status is turned into an
    /// error that includes the response body.
    async fn perform_llm_completion(
        client: Arc<Client>,
        llm_api_token: LlmApiToken,
        body: PerformCompletionParams,
    ) -> Result<Response<AsyncBody>> {
        let http_client = &client.http_client();

        let mut token = llm_api_token.acquire(&client).await?;
        let mut did_retry = false;

        let response = loop {
            // Rebuild the request each attempt so a refreshed token is used.
            let request = http_client::Request::builder()
                .method(Method::POST)
                .uri(http_client.build_zed_llm_url("/completion", &[])?.as_ref())
                .header("Content-Type", "application/json")
                .header("Authorization", format!("Bearer {token}"))
                .body(serde_json::to_string(&body)?.into())?;
            let mut response = http_client.send(request).await?;
            if response.status().is_success() {
                break response;
            } else if !did_retry
                && response
                    .headers()
                    .get(EXPIRED_LLM_TOKEN_HEADER_NAME)
                    .is_some()
            {
                did_retry = true;
                token = llm_api_token.refresh(&client).await?;
            } else {
                // Capture the body for a more useful error message, then bail
                // out of the loop by propagating the error with `?`.
                let mut body = String::new();
                response.body_mut().read_to_string(&mut body).await?;
                break Err(anyhow!(
                    "cloud language model completion failed with status {}: {body}",
                    response.status()
                ))?;
            }
        };

        Ok(response)
    }
}
391
impl LanguageModel for CloudLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("zed.dev/{}", self.model.id())
    }

    fn availability(&self) -> LanguageModelAvailability {
        self.model.availability()
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    /// Counts the tokens of `request` using the underlying model's tokenizer.
    ///
    /// Anthropic/OpenAI counting happens locally; Google counting is delegated
    /// to the server via an RPC. Zed models are counted with the OpenAI
    /// gpt-3.5-turbo tokenizer (Zed models are OpenAI-based; see below).
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        match self.model.clone() {
            CloudModel::Anthropic(_) => count_anthropic_tokens(request, cx),
            CloudModel::OpenAi(model) => count_open_ai_tokens(request, model, cx),
            CloudModel::Google(model) => {
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let request = google_ai::CountTokensRequest {
                    contents: request.contents,
                };
                async move {
                    let request = serde_json::to_string(&request)?;
                    let response = client
                        .request(proto::CountLanguageModelTokens {
                            provider: proto::LanguageModelProvider::Google as i32,
                            request,
                        })
                        .await?;
                    Ok(response.token_count as usize)
                }
                .boxed()
            }
            CloudModel::Zed(_) => {
                count_open_ai_tokens(request, open_ai::Model::ThreePointFiveTurbo, cx)
            }
        }
    }

    /// Streams a completion for `request` through the zed.dev LLM API,
    /// yielding text chunks as they arrive.
    ///
    /// Each arm converts the request into the upstream provider's wire format,
    /// runs the HTTP call under the rate limiter, and extracts plain text from
    /// that provider's event stream.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        match &self.model {
            CloudModel::Anthropic(model) => {
                let request = request.into_anthropic(model.id().into(), model.max_output_tokens());
                let client = self.client.clone();
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Anthropic,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(anthropic::extract_text_from_events(
                        response_lines(response).map_err(AnthropicError::Other),
                    ))
                });
                async move {
                    // Normalize Anthropic-specific errors back into `anyhow::Error`.
                    Ok(future
                        .await?
                        .map(|result| result.map_err(|err| anyhow!(err)))
                        .boxed())
                }
                .boxed()
            }
            CloudModel::OpenAi(model) => {
                let client = self.client.clone();
                let request = request.into_open_ai(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::OpenAi,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::Google(model) => {
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Google,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(google_ai::extract_text_from_events(response_lines(
                        response,
                    )))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::Zed(model) => {
                let client = self.client.clone();
                // Zed models speak the OpenAI wire format.
                let mut request = request.into_open_ai(model.id().into());
                request.max_tokens = Some(4000);
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Zed,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
        }
    }

    /// Forces the model to call the named tool and streams back the tool's
    /// argument JSON as it is generated.
    ///
    /// Not implemented for Google models. Anthropic may substitute a
    /// tool-capable model via `tool_model_id()`.
    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        input_schema: serde_json::Value,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        let client = self.client.clone();
        let llm_api_token = self.llm_api_token.clone();

        match &self.model {
            CloudModel::Anthropic(model) => {
                let mut request =
                    request.into_anthropic(model.tool_model_id().into(), model.max_output_tokens());
                // Pin the model to the single requested tool.
                request.tool_choice = Some(anthropic::ToolChoice::Tool {
                    name: tool_name.clone(),
                });
                request.tools = vec![anthropic::Tool {
                    name: tool_name.clone(),
                    description: tool_description,
                    input_schema,
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Anthropic,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(anthropic::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::OpenAi(model) => {
                let mut request = request.into_open_ai(model.id().into());
                // Force the named function; schema goes in `tools`, not the choice.
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::OpenAi,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::Google(_) => {
                future::ready(Err(anyhow!("tool use not implemented for Google AI"))).boxed()
            }
            CloudModel::Zed(model) => {
                // All Zed models are OpenAI-based at the time of writing.
                let mut request = request.into_open_ai(model.id().into());
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Zed,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
        }
    }
}
698
699fn response_lines<T: DeserializeOwned>(
700 response: Response<AsyncBody>,
701) -> impl Stream<Item = Result<T>> {
702 futures::stream::try_unfold(
703 (String::new(), BufReader::new(response.into_body())),
704 move |(mut line, mut body)| async {
705 match body.read_line(&mut line).await {
706 Ok(0) => Ok(None),
707 Ok(_) => {
708 let event: T = serde_json::from_str(&line)?;
709 line.clear();
710 Ok(Some((event, (line, body))))
711 }
712 Err(e) => Err(e.into()),
713 }
714 },
715 )
716}
717
718impl LlmApiToken {
719 async fn acquire(&self, client: &Arc<Client>) -> Result<String> {
720 let lock = self.0.upgradable_read().await;
721 if let Some(token) = lock.as_ref() {
722 Ok(token.to_string())
723 } else {
724 Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, &client).await
725 }
726 }
727
728 async fn refresh(&self, client: &Arc<Client>) -> Result<String> {
729 Self::fetch(self.0.write().await, &client).await
730 }
731
732 async fn fetch<'a>(
733 mut lock: RwLockWriteGuard<'a, Option<String>>,
734 client: &Arc<Client>,
735 ) -> Result<String> {
736 let response = client.request(proto::GetLlmToken {}).await?;
737 *lock = Some(response.token.clone());
738 Ok(response.token.clone())
739 }
740}
741
/// Configuration UI for the zed.dev provider: sign-in, plan info, and upgrade links.
struct ConfigurationView {
    state: gpui::Model<State>,
}
745
impl ConfigurationView {
    /// Starts sign-in via the shared client; failures are logged, not surfaced.
    fn authenticate(&mut self, cx: &mut ViewContext<Self>) {
        self.state.update(cx, |state, cx| {
            state.authenticate(cx).detach_and_log_err(cx);
        });
        cx.notify();
    }
}
754
755impl Render for ConfigurationView {
756 fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
757 const ZED_AI_URL: &str = "https://zed.dev/ai";
758 const ACCOUNT_SETTINGS_URL: &str = "https://zed.dev/account";
759
760 let is_connected = !self.state.read(cx).is_signed_out();
761 let plan = self.state.read(cx).user_store.read(cx).current_plan();
762 let must_accept_terms = !self.state.read(cx).has_accepted_terms_of_service(cx);
763
764 let is_pro = plan == Some(proto::Plan::ZedPro);
765
766 if is_connected {
767 v_flex()
768 .gap_3()
769 .max_w_4_5()
770 .when(must_accept_terms, |this| {
771 this.child(Label::new(
772 "You must accept the terms of service to use this provider.",
773 ))
774 })
775 .child(Label::new(
776 if is_pro {
777 "You have full access to Zed's hosted models from Anthropic, OpenAI, Google with faster speeds and higher limits through Zed Pro."
778 } else {
779 "You have basic access to models from Anthropic through the Zed AI Free plan."
780 }))
781 .children(if is_pro {
782 Some(
783 h_flex().child(
784 Button::new("manage_settings", "Manage Subscription")
785 .style(ButtonStyle::Filled)
786 .on_click(
787 cx.listener(|_, _, cx| cx.open_url(ACCOUNT_SETTINGS_URL)),
788 ),
789 ),
790 )
791 } else if cx.has_flag::<ZedPro>() {
792 Some(
793 h_flex()
794 .gap_2()
795 .child(
796 Button::new("learn_more", "Learn more")
797 .style(ButtonStyle::Subtle)
798 .on_click(cx.listener(|_, _, cx| cx.open_url(ZED_AI_URL))),
799 )
800 .child(
801 Button::new("upgrade", "Upgrade")
802 .style(ButtonStyle::Subtle)
803 .color(Color::Accent)
804 .on_click(
805 cx.listener(|_, _, cx| cx.open_url(ACCOUNT_SETTINGS_URL)),
806 ),
807 ),
808 )
809 } else {
810 None
811 })
812 } else {
813 v_flex()
814 .gap_6()
815 .child(Label::new("Use the zed.dev to access language models."))
816 .child(
817 v_flex()
818 .gap_2()
819 .child(
820 Button::new("sign_in", "Sign in")
821 .icon_color(Color::Muted)
822 .icon(IconName::Github)
823 .icon_position(IconPosition::Start)
824 .style(ButtonStyle::Filled)
825 .full_width()
826 .on_click(cx.listener(move |this, _, cx| this.authenticate(cx))),
827 )
828 .child(
829 div().flex().w_full().items_center().child(
830 Label::new("Sign in to enable collaboration.")
831 .color(Color::Muted)
832 .size(LabelSize::Small),
833 ),
834 ),
835 )
836 }
837 }
838}