//! Vercel v0 language-model provider (OpenAI-compatible API).

  1use anyhow::{Result, anyhow};
  2use collections::BTreeMap;
  3use futures::{FutureExt, StreamExt, future, future::BoxFuture};
  4use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
  5use http_client::HttpClient;
  6use language_model::{
  7    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
  8    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
  9    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
 10    LanguageModelToolChoice, RateLimiter, Role,
 11};
 12use open_ai::ResponseStreamEvent;
 13pub use settings::VercelAvailableModel as AvailableModel;
 14use settings::{Settings, SettingsStore};
 15use std::sync::{Arc, LazyLock};
 16use strum::IntoEnumIterator;
 17use ui::{List, prelude::*};
 18use ui_input::InputField;
 19use util::ResultExt;
 20use vercel::{Model, VERCEL_API_URL};
 21use zed_env_vars::{EnvVar, env_var};
 22
 23use crate::{
 24    api_key::ApiKeyState,
 25    ui::{ConfiguredApiCard, InstructionListItem},
 26};
 27
// Stable identifier used to key this provider in settings and model lookups.
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("vercel");
// Human-readable provider name shown in the UI and used in error messages.
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Vercel");

// Environment variable consulted for an API key before falling back to stored credentials.
const API_KEY_ENV_VAR_NAME: &str = "VERCEL_API_KEY";
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
 33
/// User-configurable settings for the Vercel provider.
#[derive(Clone, Debug, PartialEq)]
pub struct VercelSettings {
    /// Base URL for the API; an empty string means "use `VERCEL_API_URL`".
    pub api_url: String,
    /// Additional user-defined models beyond the built-in ones.
    pub available_models: Vec<AvailableModel>,
}
 39
/// Language-model provider backed by Vercel v0's OpenAI-compatible API.
pub struct VercelLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    // Shared auth state; also observed by the configuration UI.
    state: Entity<State>,
}
 44
/// Authentication state for the provider: tracks the API key for the current URL.
pub struct State {
    api_key_state: ApiKeyState,
}
 48
 49impl State {
 50    fn is_authenticated(&self) -> bool {
 51        self.api_key_state.has_key()
 52    }
 53
 54    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
 55        let api_url = VercelLanguageModelProvider::api_url(cx);
 56        self.api_key_state
 57            .store(api_url, api_key, |this| &mut this.api_key_state, cx)
 58    }
 59
 60    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
 61        let api_url = VercelLanguageModelProvider::api_url(cx);
 62        self.api_key_state.load_if_needed(
 63            api_url,
 64            &API_KEY_ENV_VAR,
 65            |this| &mut this.api_key_state,
 66            cx,
 67        )
 68    }
 69}
 70
 71impl VercelLanguageModelProvider {
 72    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
 73        let state = cx.new(|cx| {
 74            cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
 75                let api_url = Self::api_url(cx);
 76                this.api_key_state.handle_url_change(
 77                    api_url,
 78                    &API_KEY_ENV_VAR,
 79                    |this| &mut this.api_key_state,
 80                    cx,
 81                );
 82                cx.notify();
 83            })
 84            .detach();
 85            State {
 86                api_key_state: ApiKeyState::new(Self::api_url(cx)),
 87            }
 88        });
 89
 90        Self { http_client, state }
 91    }
 92
 93    fn create_language_model(&self, model: vercel::Model) -> Arc<dyn LanguageModel> {
 94        Arc::new(VercelLanguageModel {
 95            id: LanguageModelId::from(model.id().to_string()),
 96            model,
 97            state: self.state.clone(),
 98            http_client: self.http_client.clone(),
 99            request_limiter: RateLimiter::new(4),
100        })
101    }
102
103    fn settings(cx: &App) -> &VercelSettings {
104        &crate::AllLanguageModelSettings::get_global(cx).vercel
105    }
106
107    fn api_url(cx: &App) -> SharedString {
108        let api_url = &Self::settings(cx).api_url;
109        if api_url.is_empty() {
110            VERCEL_API_URL.into()
111        } else {
112            SharedString::new(api_url.as_str())
113        }
114    }
115}
116
impl LanguageModelProviderState for VercelLanguageModelProvider {
    type ObservableEntity = State;

    // Expose the auth state entity so observers can react to credential changes.
    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
124
125impl LanguageModelProvider for VercelLanguageModelProvider {
126    fn id(&self) -> LanguageModelProviderId {
127        PROVIDER_ID
128    }
129
130    fn name(&self) -> LanguageModelProviderName {
131        PROVIDER_NAME
132    }
133
134    fn icon(&self) -> IconName {
135        IconName::AiVZero
136    }
137
138    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
139        Some(self.create_language_model(vercel::Model::default()))
140    }
141
142    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
143        Some(self.create_language_model(vercel::Model::default_fast()))
144    }
145
146    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
147        let mut models = BTreeMap::default();
148
149        for model in vercel::Model::iter() {
150            if !matches!(model, vercel::Model::Custom { .. }) {
151                models.insert(model.id().to_string(), model);
152            }
153        }
154
155        for model in &Self::settings(cx).available_models {
156            models.insert(
157                model.name.clone(),
158                vercel::Model::Custom {
159                    name: model.name.clone(),
160                    display_name: model.display_name.clone(),
161                    max_tokens: model.max_tokens,
162                    max_output_tokens: model.max_output_tokens,
163                    max_completion_tokens: model.max_completion_tokens,
164                },
165            );
166        }
167
168        models
169            .into_values()
170            .map(|model| self.create_language_model(model))
171            .collect()
172    }
173
174    fn is_authenticated(&self, cx: &App) -> bool {
175        self.state.read(cx).is_authenticated()
176    }
177
178    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
179        self.state.update(cx, |state, cx| state.authenticate(cx))
180    }
181
182    fn configuration_view(
183        &self,
184        _target_agent: language_model::ConfigurationViewTargetAgent,
185        window: &mut Window,
186        cx: &mut App,
187    ) -> AnyView {
188        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
189            .into()
190    }
191
192    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
193        self.state
194            .update(cx, |state, cx| state.set_api_key(None, cx))
195    }
196}
197
/// A single Vercel model exposed through the `LanguageModel` trait.
pub struct VercelLanguageModel {
    id: LanguageModelId,
    model: vercel::Model,
    // Shared auth state, read at request time for the API key.
    state: Entity<State>,
    http_client: Arc<dyn HttpClient>,
    // Limits concurrent completion requests (constructed with a limit of 4).
    request_limiter: RateLimiter,
}
205
impl VercelLanguageModel {
    /// Start a streaming completion against the Vercel OpenAI-compatible endpoint.
    ///
    /// Resolves the API key and URL from app state synchronously, then defers
    /// the actual HTTP request behind the provider's rate limiter.
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();

        // `read_with` fails only if the app state has been dropped (shutdown).
        let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
            let api_url = VercelLanguageModelProvider::api_url(cx);
            (state.api_key_state.key(&api_url), api_url)
        }) else {
            return future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        // Wrap the request in the rate limiter so at most N requests run at once
        // (N set where `request_limiter` is constructed).
        let future = self.request_limiter.stream(async move {
            let provider = PROVIDER_NAME;
            // A missing key is surfaced as a typed completion error, not anyhow.
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey { provider });
            };
            let request = open_ai::stream_completion(
                http_client.as_ref(),
                provider.0.as_str(),
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}
241
242impl LanguageModel for VercelLanguageModel {
243    fn id(&self) -> LanguageModelId {
244        self.id.clone()
245    }
246
247    fn name(&self) -> LanguageModelName {
248        LanguageModelName::from(self.model.display_name().to_string())
249    }
250
251    fn provider_id(&self) -> LanguageModelProviderId {
252        PROVIDER_ID
253    }
254
255    fn provider_name(&self) -> LanguageModelProviderName {
256        PROVIDER_NAME
257    }
258
259    fn supports_tools(&self) -> bool {
260        true
261    }
262
263    fn supports_images(&self) -> bool {
264        true
265    }
266
267    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
268        match choice {
269            LanguageModelToolChoice::Auto
270            | LanguageModelToolChoice::Any
271            | LanguageModelToolChoice::None => true,
272        }
273    }
274
275    fn telemetry_id(&self) -> String {
276        format!("vercel/{}", self.model.id())
277    }
278
279    fn max_token_count(&self) -> u64 {
280        self.model.max_token_count()
281    }
282
283    fn max_output_tokens(&self) -> Option<u64> {
284        self.model.max_output_tokens()
285    }
286
287    fn count_tokens(
288        &self,
289        request: LanguageModelRequest,
290        cx: &App,
291    ) -> BoxFuture<'static, Result<u64>> {
292        count_vercel_tokens(request, self.model.clone(), cx)
293    }
294
295    fn stream_completion(
296        &self,
297        request: LanguageModelRequest,
298        cx: &AsyncApp,
299    ) -> BoxFuture<
300        'static,
301        Result<
302            futures::stream::BoxStream<
303                'static,
304                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
305            >,
306            LanguageModelCompletionError,
307        >,
308    > {
309        let request = crate::provider::open_ai::into_open_ai(
310            request,
311            self.model.id(),
312            self.model.supports_parallel_tool_calls(),
313            self.model.supports_prompt_cache_key(),
314            self.max_output_tokens(),
315            None,
316        );
317        let completions = self.stream_completion(request, cx);
318        async move {
319            let mapper = crate::provider::open_ai::OpenAiEventMapper::new();
320            Ok(mapper.map_stream(completions.await?).boxed())
321        }
322        .boxed()
323    }
324}
325
326pub fn count_vercel_tokens(
327    request: LanguageModelRequest,
328    model: Model,
329    cx: &App,
330) -> BoxFuture<'static, Result<u64>> {
331    cx.background_spawn(async move {
332        let messages = request
333            .messages
334            .into_iter()
335            .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
336                role: match message.role {
337                    Role::User => "user".into(),
338                    Role::Assistant => "assistant".into(),
339                    Role::System => "system".into(),
340                },
341                content: Some(message.string_contents()),
342                name: None,
343                function_call: None,
344            })
345            .collect::<Vec<_>>();
346
347        match model {
348            Model::Custom { max_tokens, .. } => {
349                let model = if max_tokens >= 100_000 {
350                    // If the max tokens is 100k or more, it is likely the o200k_base tokenizer from gpt4o
351                    "gpt-4o"
352                } else {
353                    // Otherwise fallback to gpt-4, since only cl100k_base and o200k_base are
354                    // supported with this tiktoken method
355                    "gpt-4"
356                };
357                tiktoken_rs::num_tokens_from_messages(model, &messages)
358            }
359            // Map Vercel models to appropriate OpenAI models for token counting
360            // since Vercel uses OpenAI-compatible API
361            Model::VZeroOnePointFiveMedium => {
362                // Vercel v0 is similar to GPT-4o, so use gpt-4o for token counting
363                tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages)
364            }
365        }
366        .map(|tokens| tokens as u64)
367    })
368    .boxed()
369}
370
/// Settings-panel view for entering/resetting the Vercel API key.
struct ConfigurationView {
    api_key_editor: Entity<InputField>,
    state: Entity<State>,
    // Present while credentials are being loaded; `None` once loading finishes.
    load_credentials_task: Option<Task<()>>,
}
376
377impl ConfigurationView {
378    fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
379        let api_key_editor = cx.new(|cx| {
380            InputField::new(
381                window,
382                cx,
383                "v1:0000000000000000000000000000000000000000000000000",
384            )
385            .label("API key")
386        });
387
388        cx.observe(&state, |_, _, cx| {
389            cx.notify();
390        })
391        .detach();
392
393        let load_credentials_task = Some(cx.spawn_in(window, {
394            let state = state.clone();
395            async move |this, cx| {
396                if let Some(task) = state
397                    .update(cx, |state, cx| state.authenticate(cx))
398                    .log_err()
399                {
400                    // We don't log an error, because "not signed in" is also an error.
401                    let _ = task.await;
402                }
403                this.update(cx, |this, cx| {
404                    this.load_credentials_task = None;
405                    cx.notify();
406                })
407                .log_err();
408            }
409        }));
410
411        Self {
412            api_key_editor,
413            state,
414            load_credentials_task,
415        }
416    }
417
418    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
419        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
420        if api_key.is_empty() {
421            return;
422        }
423
424        // url changes can cause the editor to be displayed again
425        self.api_key_editor
426            .update(cx, |editor, cx| editor.set_text("", window, cx));
427
428        let state = self.state.clone();
429        cx.spawn_in(window, async move |_, cx| {
430            state
431                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
432                .await
433        })
434        .detach_and_log_err(cx);
435    }
436
437    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
438        self.api_key_editor
439            .update(cx, |input, cx| input.set_text("", window, cx));
440
441        let state = self.state.clone();
442        cx.spawn_in(window, async move |_, cx| {
443            state
444                .update(cx, |state, cx| state.set_api_key(None, cx))?
445                .await
446        })
447        .detach_and_log_err(cx);
448    }
449
450    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
451        !self.state.read(cx).is_authenticated()
452    }
453}
454
impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        // Keys supplied via the env var can't be reset from the UI, so the
        // "configured" card is disabled and labeled accordingly below.
        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();
        let configured_card_label = if env_var_set {
            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable")
        } else {
            let api_url = VercelLanguageModelProvider::api_url(cx);
            // Mention the URL only when it differs from the default endpoint.
            if api_url == VERCEL_API_URL {
                "API key configured".to_string()
            } else {
                format!("API key configured for {}", api_url)
            }
        };

        // Either the key-entry form (unauthenticated) or a configured-key card.
        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's agent with Vercel v0, you need to add an API key. Follow these steps:"))
                .child(
                    List::new()
                        .child(InstructionListItem::new(
                            "Create one by visiting",
                            Some("Vercel v0's console"),
                            Some("https://v0.dev/chat/settings/keys"),
                        ))
                        .child(InstructionListItem::text_only(
                            "Paste your API key below and hit enter to start using the agent",
                        )),
                )
                .child(self.api_key_editor.clone())
                .child(
                    Label::new(format!(
                        "You can also assign the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed."
                    ))
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .child(
                    Label::new("Note that Vercel v0 is a custom OpenAI-compatible provider.")
                        .size(LabelSize::Small)
                        .color(Color::Muted),
                )
                .into_any_element()
        } else {
            ConfiguredApiCard::new(configured_card_label)
                .disabled(env_var_set)
                .when(env_var_set, |this| {
                    this.tooltip_label(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable."))
                })
                .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx)))
                .into_any_element()
        };

        // While credentials are loading, show a placeholder instead of the form.
        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}