vercel.rs

use anyhow::Result;
use collections::BTreeMap;
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
use language_model::{
    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
    LanguageModelToolChoice, RateLimiter, Role,
};
use open_ai::ResponseStreamEvent;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::sync::{Arc, LazyLock};
use strum::IntoEnumIterator;
use ui::{ElevationIndex, List, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;
use vercel::Model;
use zed_env_vars::{EnvVar, env_var};

use crate::{api_key::ApiKeyState, ui::InstructionListItem};

const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("vercel");
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Vercel");

const API_KEY_ENV_VAR_NAME: &str = "VERCEL_API_KEY";
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);

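/// Vercel provider settings: an optional API URL override and any extra models
/// declared in the user's settings file.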
#[derive(Default, Clone, Debug, PartialEq)]
pub struct VercelSettings {
    pub api_url: String,
    pub available_models: Vec<AvailableModel>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
}

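/// The Vercel language model provider. It owns the HTTP client used for
/// requests and the shared authentication state for every model it creates.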
pub struct VercelLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}

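/// Shared provider state: the API key tracked for the currently configured API URL.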
pub struct State {
    api_key_state: ApiKeyState,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key_state.has_key()
    }

    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
        let api_url = VercelLanguageModelProvider::api_url(cx);
        self.api_key_state
            .store(api_url, api_key, |this| &mut this.api_key_state, cx)
    }

    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let api_url = VercelLanguageModelProvider::api_url(cx);
        self.api_key_state.load_if_needed(
            api_url,
            &API_KEY_ENV_VAR,
            |this| &mut this.api_key_state,
            cx,
        )
    }
}

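// Provider construction and settings helpers. `new` observes the global settings
// store so that a change to the configured API URL re-resolves the stored API key.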
impl VercelLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        let state = cx.new(|cx| {
            cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let api_url = Self::api_url(cx);
                this.api_key_state.handle_url_change(
                    api_url,
                    &API_KEY_ENV_VAR,
                    |this| &mut this.api_key_state,
                    cx,
                );
                cx.notify();
            })
            .detach();
            State {
                api_key_state: ApiKeyState::new(Self::api_url(cx)),
            }
        });

        Self { http_client, state }
    }

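    // Wraps a `vercel::Model` in a `VercelLanguageModel` that shares this
    // provider's state and HTTP client, limited to four concurrent requests.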
    fn create_language_model(&self, model: vercel::Model) -> Arc<dyn LanguageModel> {
        Arc::new(VercelLanguageModel {
            id: LanguageModelId::from(model.id().to_string()),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        })
    }

    fn settings(cx: &App) -> &VercelSettings {
        &crate::AllLanguageModelSettings::get_global(cx).vercel
    }

    fn api_url(cx: &App) -> SharedString {
        let api_url = &Self::settings(cx).api_url;
        if api_url.is_empty() {
            vercel::VERCEL_API_URL.into()
        } else {
            SharedString::new(api_url.as_str())
        }
    }
}

impl LanguageModelProviderState for VercelLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

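// The provider surface: default models, the list shown in the model picker,
// authentication, and the configuration view.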
impl LanguageModelProvider for VercelLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn icon(&self) -> IconName {
        IconName::AiVZero
    }

    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        Some(self.create_language_model(vercel::Model::default()))
    }

    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        Some(self.create_language_model(vercel::Model::default_fast()))
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        for model in vercel::Model::iter() {
            if !matches!(model, vercel::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        for model in &Self::settings(cx).available_models {
            models.insert(
                model.name.clone(),
                vercel::Model::Custom {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                    max_output_tokens: model.max_output_tokens,
                    max_completion_tokens: model.max_completion_tokens,
                },
            );
        }

        models
            .into_values()
            .map(|model| self.create_language_model(model))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(
        &self,
        _target_agent: language_model::ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state
            .update(cx, |state, cx| state.set_api_key(None, cx))
    }
}

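/// A single Vercel model, accessed through its OpenAI-compatible completions API.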
pub struct VercelLanguageModel {
    id: LanguageModelId,
    model: vercel::Model,
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

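// Low-level streaming helper: resolves the API key and URL from the shared state,
// then issues the request through the rate limiter via the `open_ai` client.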
impl VercelLanguageModel {
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();

        let api_key_and_url = self.state.read_with(cx, |state, cx| {
            let api_url = VercelLanguageModelProvider::api_url(cx);
            let api_key = state.api_key_state.key(&api_url);
            (api_key, api_url)
        });
        let (api_key, api_url) = match api_key_and_url {
            Ok(api_key_and_url) => api_key_and_url,
            Err(err) => {
                return futures::future::ready(Err(err)).boxed();
            }
        };

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request =
                open_ai::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

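// Model metadata plus the public streaming entry point, which converts requests
// into the OpenAI wire format and maps response events back into completion events.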
impl LanguageModel for VercelLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        true
    }

    fn supports_images(&self) -> bool {
        true
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto
            | LanguageModelToolChoice::Any
            | LanguageModelToolChoice::None => true,
        }
    }

    fn telemetry_id(&self) -> String {
        format!("vercel/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        count_vercel_tokens(request, self.model.clone(), cx)
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let request = crate::provider::open_ai::into_open_ai(
            request,
            self.model.id(),
            self.model.supports_parallel_tool_calls(),
            self.model.supports_prompt_cache_key(),
            self.max_output_tokens(),
            None,
        );
        let completions = self.stream_completion(request, cx);
        async move {
            let mapper = crate::provider::open_ai::OpenAiEventMapper::new();
            Ok(mapper.map_stream(completions.await?).boxed())
        }
        .boxed()
    }
}

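/// Estimates token usage on a background thread. Because Vercel's API is
/// OpenAI-compatible, counting is delegated to `tiktoken_rs` using an OpenAI
/// tokenizer that approximates the selected model.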
pub fn count_vercel_tokens(
    request: LanguageModelRequest,
    model: Model,
    cx: &App,
) -> BoxFuture<'static, Result<u64>> {
    cx.background_spawn(async move {
        let messages = request
            .messages
            .into_iter()
            .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                role: match message.role {
                    Role::User => "user".into(),
                    Role::Assistant => "assistant".into(),
                    Role::System => "system".into(),
                },
                content: Some(message.string_contents()),
                name: None,
                function_call: None,
            })
            .collect::<Vec<_>>();

        match model {
            Model::Custom { max_tokens, .. } => {
                let model = if max_tokens >= 100_000 {
                    // If the context window is 100k tokens or more, the model most likely
                    // uses the o200k_base tokenizer from gpt-4o.
                    "gpt-4o"
                } else {
                    // Otherwise fall back to gpt-4, since only cl100k_base and o200k_base
                    // are supported by this tiktoken method.
                    "gpt-4"
                };
                tiktoken_rs::num_tokens_from_messages(model, &messages)
            }
            // Map Vercel models to appropriate OpenAI models for token counting,
            // since Vercel uses an OpenAI-compatible API.
            Model::VZeroOnePointFiveMedium => {
                // Vercel v0 is similar to GPT-4o, so use gpt-4o for token counting.
                tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages)
            }
        }
        .map(|tokens| tokens as u64)
    })
    .boxed()
}

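/// Settings UI for entering, saving, and resetting the Vercel API key.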
struct ConfigurationView {
    api_key_editor: Entity<SingleLineInput>,
    state: gpui::Entity<State>,
    load_credentials_task: Option<Task<()>>,
}

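// Construction kicks off credential loading; the save and reset handlers write
// the key back through the shared provider state.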
impl ConfigurationView {
    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor = cx.new(|cx| {
            SingleLineInput::new(
                window,
                cx,
                "v1:0000000000000000000000000000000000000000000000000",
            )
            .label("API key")
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // Don't log this error: being "not signed in" is also reported as an
                    // error here, and that is an expected state.
                    let _ = task.await;
                }
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor.update(cx, |input, cx| {
            input.editor.update(cx, |editor, cx| {
                editor.set_text("", window, cx);
            });
        });

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(None, cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

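// Renders either the API key entry form with setup instructions, or a summary
// row showing where the configured key came from plus a reset button.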
impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();

        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's agent with Vercel v0, you need to add an API key. Follow these steps:"))
                .child(
                    List::new()
                        .child(InstructionListItem::new(
                            "Create one by visiting",
                            Some("Vercel v0's console"),
                            Some("https://v0.dev/chat/settings/keys"),
                        ))
                        .child(InstructionListItem::text_only(
                            "Paste your API key below and hit enter to start using the agent",
                        )),
                )
                .child(self.api_key_editor.clone())
                .child(
                    Label::new(format!(
                        "You can also assign the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed."
                    ))
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .child(
                    Label::new("Note that Vercel v0 is a custom OpenAI-compatible provider.")
                        .size(LabelSize::Small)
                        .color(Color::Muted),
                )
                .into_any()
        } else {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        };

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}