use super::open_ai::count_open_ai_tokens;
use crate::{
    settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelCacheConfiguration,
    LanguageModelId, LanguageModelName, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, RateLimiter, ZedModel,
};
use anthropic::AnthropicError;
use anyhow::{anyhow, Result};
use client::{Client, PerformCompletionParams, UserStore, EXPIRED_LLM_TOKEN_HEADER_NAME};
use collections::BTreeMap;
use feature_flags::{FeatureFlagAppExt, ZedPro};
use futures::{
    future::BoxFuture, stream::BoxStream, AsyncBufReadExt, FutureExt, Stream, StreamExt,
    TryStreamExt as _,
};
use gpui::{
    AnyElement, AnyView, AppContext, AsyncAppContext, FontWeight, Model, ModelContext,
    Subscription, Task,
};
use http_client::{AsyncBody, HttpClient, Method, Response};
use schemars::JsonSchema;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_json::value::RawValue;
use settings::{Settings, SettingsStore};
use smol::{
    io::{AsyncReadExt, BufReader},
    lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard},
};
use std::{future, sync::Arc};
use strum::IntoEnumIterator;
use ui::prelude::*;

use crate::{LanguageModelAvailability, LanguageModelProvider};

use super::anthropic::count_anthropic_tokens;

pub const PROVIDER_ID: &str = "zed.dev";
pub const PROVIDER_NAME: &str = "Zed";

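/// Settings for the zed.dev provider, including any additional models the
/// user has made available through their settings file.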
#[derive(Default, Clone, Debug, PartialEq)]
pub struct ZedDotDevSettings {
    pub available_models: Vec<AvailableModel>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum AvailableProvider {
    Anthropic,
    OpenAi,
    Google,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    /// The provider of the language model.
    pub provider: AvailableProvider,
    /// The model's name in the provider's API, e.g. "claude-3-5-sonnet-20240620".
    pub name: String,
    /// The name displayed in the UI, such as in the assistant panel's model dropdown.
    pub display_name: Option<String>,
    /// The size of the context window, i.e. the maximum number of tokens the model can process.
    pub max_tokens: usize,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u32>,
    /// Override this model with a different Anthropic model for tool calls.
    pub tool_override: Option<String>,
    /// Indicates whether this custom model supports caching.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
}

pub struct CloudLanguageModelProvider {
    client: Arc<Client>,
    llm_api_token: LlmApiToken,
    state: gpui::Model<State>,
    _maintain_client_status: Task<()>,
}

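/// Shared provider state: the connected client, the user store, and the
/// current sign-in status.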
pub struct State {
    client: Arc<Client>,
    user_store: Model<UserStore>,
    status: client::Status,
    accept_terms: Option<Task<Result<()>>>,
    _subscription: Subscription,
}

impl State {
    fn is_signed_out(&self) -> bool {
        self.status.is_signed_out()
    }

    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let client = self.client.clone();
        cx.spawn(move |this, mut cx| async move {
            client.authenticate_and_connect(true, &cx).await?;
            this.update(&mut cx, |_, cx| cx.notify())
        })
    }

    fn has_accepted_terms_of_service(&self, cx: &AppContext) -> bool {
        self.user_store
            .read(cx)
            .current_user_has_accepted_terms()
            .unwrap_or(false)
    }

    fn accept_terms_of_service(&mut self, cx: &mut ModelContext<Self>) {
        let user_store = self.user_store.clone();
        self.accept_terms = Some(cx.spawn(move |this, mut cx| async move {
            let _ = user_store
                .update(&mut cx, |store, cx| store.accept_terms_of_service(cx))?
                .await;
            this.update(&mut cx, |this, cx| {
                this.accept_terms = None;
                cx.notify()
            })
        }));
    }
}

impl CloudLanguageModelProvider {
    pub fn new(user_store: Model<UserStore>, client: Arc<Client>, cx: &mut AppContext) -> Self {
        let mut status_rx = client.status();
        let status = *status_rx.borrow();

        let state = cx.new_model(|cx| State {
            client: client.clone(),
            user_store,
            status,
            accept_terms: None,
            _subscription: cx.observe_global::<SettingsStore>(|_, cx| {
                cx.notify();
            }),
        });

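        // Mirror the client's connection status into `State`, notifying
        // observers on changes; the loop ends once the state model is dropped.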
        let state_ref = state.downgrade();
        let maintain_client_status = cx.spawn(|mut cx| async move {
            while let Some(status) = status_rx.next().await {
                if let Some(this) = state_ref.upgrade() {
                    _ = this.update(&mut cx, |this, cx| {
                        if this.status != status {
                            this.status = status;
                            cx.notify();
                        }
                    });
                } else {
                    break;
                }
            }
        });

        Self {
            client,
            state,
            llm_api_token: LlmApiToken::default(),
            _maintain_client_status: maintain_client_status,
        }
    }
}

impl LanguageModelProviderState for CloudLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for CloudLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiZed
    }

    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

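        // Staff members get the full model catalog, plus any overrides from
        // settings; everyone else currently gets Claude 3.5 Sonnet only.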
        if cx.is_staff() {
            for model in anthropic::Model::iter() {
                if !matches!(model, anthropic::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::Anthropic(model));
                }
            }
            for model in open_ai::Model::iter() {
                if !matches!(model, open_ai::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::OpenAi(model));
                }
            }
            for model in google_ai::Model::iter() {
                if !matches!(model, google_ai::Model::Custom { .. }) {
                    models.insert(model.id().to_string(), CloudModel::Google(model));
                }
            }
            for model in ZedModel::iter() {
                models.insert(model.id().to_string(), CloudModel::Zed(model));
            }

            // Override with available models from settings
            for model in &AllLanguageModelSettings::get_global(cx)
                .zed_dot_dev
                .available_models
            {
                let model = match model.provider {
                    AvailableProvider::Anthropic => {
                        CloudModel::Anthropic(anthropic::Model::Custom {
                            name: model.name.clone(),
                            display_name: model.display_name.clone(),
                            max_tokens: model.max_tokens,
                            tool_override: model.tool_override.clone(),
                            cache_configuration: model.cache_configuration.as_ref().map(|config| {
                                anthropic::AnthropicModelCacheConfiguration {
                                    max_cache_anchors: config.max_cache_anchors,
                                    should_speculate: config.should_speculate,
                                    min_total_token: config.min_total_token,
                                }
                            }),
                            max_output_tokens: model.max_output_tokens,
                        })
                    }
                    AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom {
                        name: model.name.clone(),
                        max_tokens: model.max_tokens,
                    }),
                    AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom {
                        name: model.name.clone(),
                        max_tokens: model.max_tokens,
                    }),
                };
                models.insert(model.id().to_string(), model.clone());
            }
        } else {
            models.insert(
                anthropic::Model::Claude3_5Sonnet.id().to_string(),
                CloudModel::Anthropic(anthropic::Model::Claude3_5Sonnet),
            );
        }

        models
            .into_values()
            .map(|model| {
                Arc::new(CloudLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    llm_api_token: self.llm_api_token.clone(),
                    client: self.client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &AppContext) -> bool {
        !self.state.read(cx).is_signed_out()
    }

    fn authenticate(&self, _cx: &mut AppContext) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }

    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
        cx.new_view(|_cx| ConfigurationView {
            state: self.state.clone(),
        })
        .into()
    }

    fn must_accept_terms(&self, cx: &AppContext) -> bool {
        !self.state.read(cx).has_accepted_terms_of_service(cx)
    }

    fn render_accept_terms(&self, cx: &mut WindowContext) -> Option<AnyElement> {
        let state = self.state.read(cx);

        let terms = [(
            "terms_of_service",
            "Terms of Service",
            "https://zed.dev/terms-of-service",
        )]
        .map(|(id, label, url)| {
            Button::new(id, label)
                .style(ButtonStyle::Subtle)
                .icon(IconName::ExternalLink)
                .icon_size(IconSize::XSmall)
                .icon_color(Color::Muted)
                .on_click(move |_, cx| cx.open_url(url))
        });

        if state.has_accepted_terms_of_service(cx) {
            None
        } else {
            let disabled = state.accept_terms.is_some();
            Some(
                v_flex()
                    .gap_2()
                    .child(
                        v_flex()
                            .child(Label::new("Terms and Conditions").weight(FontWeight::MEDIUM))
                            .child(
                                Label::new(
                                    "Please read and accept our terms and conditions to continue.",
                                )
                                .size(LabelSize::Small),
                            ),
                    )
                    .child(v_flex().gap_1().children(terms))
                    .child(
                        h_flex().justify_end().child(
                            Button::new("accept_terms", "I've read and accept the terms")
                                .disabled(disabled)
                                .on_click({
                                    let state = self.state.downgrade();
                                    move |_, cx| {
                                        state
                                            .update(cx, |state, cx| {
                                                state.accept_terms_of_service(cx)
                                            })
                                            .ok();
                                    }
                                }),
                        ),
                    )
                    .into_any(),
            )
        }
    }

    fn reset_credentials(&self, _cx: &mut AppContext) -> Task<Result<()>> {
        Task::ready(Ok(()))
    }
}

pub struct CloudLanguageModel {
    id: LanguageModelId,
    model: CloudModel,
    llm_api_token: LlmApiToken,
    client: Arc<Client>,
    request_limiter: RateLimiter,
}

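/// A cached LLM API token, fetched lazily from the server and shared across
/// requests behind an async `RwLock`.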
#[derive(Clone, Default)]
struct LlmApiToken(Arc<RwLock<Option<String>>>);

impl CloudLanguageModel {
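    /// Sends a completion request to the zed.dev LLM service, refreshing the
    /// API token and retrying once if the server reports it as expired.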
    async fn perform_llm_completion(
        client: Arc<Client>,
        llm_api_token: LlmApiToken,
        body: PerformCompletionParams,
    ) -> Result<Response<AsyncBody>> {
        let http_client = &client.http_client();

        let mut token = llm_api_token.acquire(&client).await?;
        let mut did_retry = false;

        let response = loop {
            let request = http_client::Request::builder()
                .method(Method::POST)
                .uri(http_client.build_zed_llm_url("/completion", &[])?.as_ref())
                .header("Content-Type", "application/json")
                .header("Authorization", format!("Bearer {token}"))
                .body(serde_json::to_string(&body)?.into())?;
            let mut response = http_client.send(request).await?;
            if response.status().is_success() {
                break response;
            } else if !did_retry
                && response
                    .headers()
                    .get(EXPIRED_LLM_TOKEN_HEADER_NAME)
                    .is_some()
            {
                did_retry = true;
                token = llm_api_token.refresh(&client).await?;
            } else {
                let mut body = String::new();
                response.body_mut().read_to_string(&mut body).await?;
                break Err(anyhow!(
                    "cloud language model completion failed with status {}: {body}",
                    response.status()
                ))?;
            }
        };

        Ok(response)
    }
}

impl LanguageModel for CloudLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn icon(&self) -> Option<IconName> {
        self.model.icon()
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("zed.dev/{}", self.model.id())
    }

    fn availability(&self) -> LanguageModelAvailability {
        self.model.availability()
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        match self.model.clone() {
            CloudModel::Anthropic(_) => count_anthropic_tokens(request, cx),
            CloudModel::OpenAi(model) => count_open_ai_tokens(request, model, cx),
            CloudModel::Google(model) => {
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let request = google_ai::CountTokensRequest {
                    contents: request.contents,
                };
                async move {
                    let request = serde_json::to_string(&request)?;
                    let response = client
                        .request(proto::CountLanguageModelTokens {
                            provider: proto::LanguageModelProvider::Google as i32,
                            request,
                        })
                        .await?;
                    Ok(response.token_count as usize)
                }
                .boxed()
            }
            CloudModel::Zed(_) => {
                count_open_ai_tokens(request, open_ai::Model::ThreePointFiveTurbo, cx)
            }
        }
    }

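    // Completions for every provider are proxied through the zed.dev
    // `/completion` endpoint, with the provider-specific request serialized
    // into `provider_request`.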
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        match &self.model {
            CloudModel::Anthropic(model) => {
                let request = request.into_anthropic(model.id().into(), model.max_output_tokens());
                let client = self.client.clone();
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Anthropic,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(anthropic::extract_text_from_events(
                        response_lines(response).map_err(AnthropicError::Other),
                    ))
                });
                async move {
                    Ok(future
                        .await?
                        .map(|result| result.map_err(|err| anyhow!(err)))
                        .boxed())
                }
                .boxed()
            }
            CloudModel::OpenAi(model) => {
                let client = self.client.clone();
                let request = request.into_open_ai(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::OpenAi,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::Google(model) => {
                let client = self.client.clone();
                let request = request.into_google(model.id().into());
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Google,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(google_ai::extract_text_from_events(response_lines(
                        response,
                    )))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
            CloudModel::Zed(model) => {
                let client = self.client.clone();
                let mut request = request.into_open_ai(model.id().into());
                request.max_tokens = Some(4000);
                let llm_api_token = self.llm_api_token.clone();
                let future = self.request_limiter.stream(async move {
                    let response = Self::perform_llm_completion(
                        client.clone(),
                        llm_api_token,
                        PerformCompletionParams {
                            provider: client::LanguageModelProvider::Zed,
                            model: request.model.clone(),
                            provider_request: RawValue::from_string(serde_json::to_string(
                                &request,
                            )?)?,
                        },
                    )
                    .await?;
                    Ok(open_ai::extract_text_from_events(response_lines(response)))
                });
                async move { Ok(future.await?.boxed()) }.boxed()
            }
        }
    }

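    // Forces the model to call the named tool (via `tool_choice`) and streams
    // back the tool's input arguments as they are generated.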
    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        input_schema: serde_json::Value,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
        let client = self.client.clone();
        let llm_api_token = self.llm_api_token.clone();

        match &self.model {
            CloudModel::Anthropic(model) => {
                let mut request =
                    request.into_anthropic(model.tool_model_id().into(), model.max_output_tokens());
                request.tool_choice = Some(anthropic::ToolChoice::Tool {
                    name: tool_name.clone(),
                });
                request.tools = vec![anthropic::Tool {
                    name: tool_name.clone(),
                    description: tool_description,
                    input_schema,
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Anthropic,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(anthropic::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::OpenAi(model) => {
                let mut request = request.into_open_ai(model.id().into());
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::OpenAi,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
            CloudModel::Google(_) => {
                future::ready(Err(anyhow!("tool use not implemented for Google AI"))).boxed()
            }
            CloudModel::Zed(model) => {
                // All Zed models are OpenAI-based at the time of writing.
                let mut request = request.into_open_ai(model.id().into());
                request.tool_choice = Some(open_ai::ToolChoice::Other(
                    open_ai::ToolDefinition::Function {
                        function: open_ai::FunctionDefinition {
                            name: tool_name.clone(),
                            description: None,
                            parameters: None,
                        },
                    },
                ));
                request.tools = vec![open_ai::ToolDefinition::Function {
                    function: open_ai::FunctionDefinition {
                        name: tool_name.clone(),
                        description: Some(tool_description),
                        parameters: Some(input_schema),
                    },
                }];

                self.request_limiter
                    .run(async move {
                        let response = Self::perform_llm_completion(
                            client.clone(),
                            llm_api_token,
                            PerformCompletionParams {
                                provider: client::LanguageModelProvider::Zed,
                                model: request.model.clone(),
                                provider_request: RawValue::from_string(serde_json::to_string(
                                    &request,
                                )?)?,
                            },
                        )
                        .await?;

                        Ok(open_ai::extract_tool_args_from_events(
                            tool_name,
                            Box::pin(response_lines(response)),
                        )
                        .await?
                        .boxed())
                    })
                    .boxed()
            }
        }
    }
}

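/// Interprets a streaming response body as newline-delimited JSON, yielding
/// one deserialized event per line.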
fn response_lines<T: DeserializeOwned>(
    response: Response<AsyncBody>,
) -> impl Stream<Item = Result<T>> {
    futures::stream::try_unfold(
        (String::new(), BufReader::new(response.into_body())),
        move |(mut line, mut body)| async {
            match body.read_line(&mut line).await {
                Ok(0) => Ok(None),
                Ok(_) => {
                    let event: T = serde_json::from_str(&line)?;
                    line.clear();
                    Ok(Some((event, (line, body))))
                }
                Err(e) => Err(e.into()),
            }
        },
    )
}

impl LlmApiToken {
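    /// Returns the cached token, fetching one from the server on first use.
    /// The upgradable read lock keeps concurrent callers from fetching the
    /// token more than once.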
    async fn acquire(&self, client: &Arc<Client>) -> Result<String> {
        let lock = self.0.upgradable_read().await;
        if let Some(token) = lock.as_ref() {
            Ok(token.to_string())
        } else {
            Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, client).await
        }
    }

    async fn refresh(&self, client: &Arc<Client>) -> Result<String> {
        Self::fetch(self.0.write().await, client).await
    }

    async fn fetch<'a>(
        mut lock: RwLockWriteGuard<'a, Option<String>>,
        client: &Arc<Client>,
    ) -> Result<String> {
        let response = client.request(proto::GetLlmToken {}).await?;
        *lock = Some(response.token.clone());
        Ok(response.token)
    }
}

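/// The configuration UI for this provider: a sign-in prompt when signed out,
/// and plan details once connected.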
struct ConfigurationView {
    state: gpui::Model<State>,
}

impl ConfigurationView {
    fn authenticate(&mut self, cx: &mut ViewContext<Self>) {
        self.state.update(cx, |state, cx| {
            state.authenticate(cx).detach_and_log_err(cx);
        });
        cx.notify();
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const ZED_AI_URL: &str = "https://zed.dev/ai";
        const ACCOUNT_SETTINGS_URL: &str = "https://zed.dev/account";

        let is_connected = !self.state.read(cx).is_signed_out();
        let plan = self.state.read(cx).user_store.read(cx).current_plan();
        let must_accept_terms = !self.state.read(cx).has_accepted_terms_of_service(cx);

        let is_pro = plan == Some(proto::Plan::ZedPro);

        if is_connected {
            v_flex()
                .gap_3()
                .max_w_4_5()
                .when(must_accept_terms, |this| {
                    this.child(Label::new(
                        "You must accept the terms of service to use this provider.",
                    ))
                })
                .child(Label::new(
                    if is_pro {
                        "You have full access to Zed's hosted models from Anthropic, OpenAI, and Google, with faster speeds and higher limits through Zed Pro."
                    } else {
                        "You have basic access to models from Anthropic through the Zed AI Free plan."
                    }))
                .children(if is_pro {
                    Some(
                        h_flex().child(
                            Button::new("manage_settings", "Manage Subscription")
                                .style(ButtonStyle::Filled)
                                .on_click(
                                    cx.listener(|_, _, cx| cx.open_url(ACCOUNT_SETTINGS_URL)),
                                ),
                        ),
                    )
                } else if cx.has_flag::<ZedPro>() {
                    Some(
                        h_flex()
                            .gap_2()
                            .child(
                                Button::new("learn_more", "Learn more")
                                    .style(ButtonStyle::Subtle)
                                    .on_click(cx.listener(|_, _, cx| cx.open_url(ZED_AI_URL))),
                            )
                            .child(
                                Button::new("upgrade", "Upgrade")
                                    .style(ButtonStyle::Subtle)
                                    .color(Color::Accent)
                                    .on_click(
                                        cx.listener(|_, _, cx| cx.open_url(ACCOUNT_SETTINGS_URL)),
                                    ),
                            ),
                    )
                } else {
                    None
                })
        } else {
            v_flex()
                .gap_6()
                .child(Label::new("Use zed.dev to access language models."))
                .child(
                    v_flex()
                        .gap_2()
                        .child(
                            Button::new("sign_in", "Sign in")
                                .icon_color(Color::Muted)
                                .icon(IconName::Github)
                                .icon_position(IconPosition::Start)
                                .style(ButtonStyle::Filled)
                                .full_width()
                                .on_click(cx.listener(move |this, _, cx| this.authenticate(cx))),
                        )
                        .child(
                            div().flex().w_full().items_center().child(
                                Label::new("Sign in to enable collaboration.")
                                    .color(Color::Muted)
                                    .size(LabelSize::Small),
                            ),
                        ),
                )
        }
    }
}