use anyhow::{Result, anyhow};
use collections::BTreeMap;
use futures::{FutureExt, StreamExt, future, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
use language_model::{
    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
    LanguageModelToolChoice, RateLimiter, Role,
};
use open_ai::ResponseStreamEvent;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::sync::{Arc, LazyLock};
use strum::IntoEnumIterator;
use ui::{ElevationIndex, List, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::{ResultExt, truncate_and_trailoff};
use vercel::{Model, VERCEL_API_URL};
use zed_env_vars::{EnvVar, env_var};

use crate::{api_key::ApiKeyState, ui::InstructionListItem};

const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("vercel");
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Vercel");

const API_KEY_ENV_VAR_NAME: &str = "VERCEL_API_KEY";
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);

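/// User-facing settings for the Vercel v0 provider: the API URL (empty means
/// the default `VERCEL_API_URL`) and any additional custom models.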
#[derive(Default, Clone, Debug, PartialEq)]
pub struct VercelSettings {
    pub api_url: String,
    pub available_models: Vec<AvailableModel>,
}

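/// A custom model declared in the settings file. Token limits are supplied by
/// the user, since custom models aren't in the built-in model list.
///
/// A minimal sketch of the expected settings JSON; the exact settings path
/// (`language_models.vercel`) is an assumption here, not verified:
///
/// ```json
/// {
///   "language_models": {
///     "vercel": {
///       "available_models": [
///         { "name": "my-model", "display_name": "My Model", "max_tokens": 128000 }
///       ]
///     }
///   }
/// }
/// ```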
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
}

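/// Registers Vercel v0 as a language model provider. Vercel exposes an
/// OpenAI-compatible API, so requests are streamed through the shared
/// `open_ai` client code.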
pub struct VercelLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}

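/// Shared provider state: tracks the API key associated with the currently
/// configured API URL, whether it came from the keychain or the environment.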
pub struct State {
    api_key_state: ApiKeyState,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key_state.has_key()
    }

    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
        let api_url = VercelLanguageModelProvider::api_url(cx);
        self.api_key_state
            .store(api_url, api_key, |this| &mut this.api_key_state, cx)
    }

    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let api_url = VercelLanguageModelProvider::api_url(cx);
        self.api_key_state.load_if_needed(
            api_url,
            &API_KEY_ENV_VAR,
            |this| &mut this.api_key_state,
            cx,
        )
    }
}

impl VercelLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        let state = cx.new(|cx| {
            cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let api_url = Self::api_url(cx);
                this.api_key_state.handle_url_change(
                    api_url,
                    &API_KEY_ENV_VAR,
                    |this| &mut this.api_key_state,
                    cx,
                );
                cx.notify();
            })
            .detach();
            State {
                api_key_state: ApiKeyState::new(Self::api_url(cx)),
            }
        });

        Self { http_client, state }
    }

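    /// Wraps a `vercel::Model` in a `LanguageModel` handle that shares this
    /// provider's HTTP client and state, rate-limited to four concurrent
    /// requests.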
    fn create_language_model(&self, model: vercel::Model) -> Arc<dyn LanguageModel> {
        Arc::new(VercelLanguageModel {
            id: LanguageModelId::from(model.id().to_string()),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        })
    }

    fn settings(cx: &App) -> &VercelSettings {
        &crate::AllLanguageModelSettings::get_global(cx).vercel
    }

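    /// Returns the configured API URL, falling back to `VERCEL_API_URL` when
    /// the setting is empty.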
    fn api_url(cx: &App) -> SharedString {
        let api_url = &Self::settings(cx).api_url;
        if api_url.is_empty() {
            VERCEL_API_URL.into()
        } else {
            SharedString::new(api_url.as_str())
        }
    }
}

impl LanguageModelProviderState for VercelLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for VercelLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn icon(&self) -> IconName {
        IconName::AiVZero
    }

    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        Some(self.create_language_model(vercel::Model::default()))
    }

    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        Some(self.create_language_model(vercel::Model::default_fast()))
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        for model in vercel::Model::iter() {
            if !matches!(model, vercel::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        for model in &Self::settings(cx).available_models {
            models.insert(
                model.name.clone(),
                vercel::Model::Custom {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                    max_output_tokens: model.max_output_tokens,
                    max_completion_tokens: model.max_completion_tokens,
                },
            );
        }

        models
            .into_values()
            .map(|model| self.create_language_model(model))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(
        &self,
        _target_agent: language_model::ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state
            .update(cx, |state, cx| state.set_api_key(None, cx))
    }
}

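/// A single Vercel v0 model, backed by the provider's shared state and a
/// rate-limited HTTP client.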
pub struct VercelLanguageModel {
    id: LanguageModelId,
    model: vercel::Model,
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl VercelLanguageModel {
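    /// Reads the API key and URL from the shared state, then streams a
    /// completion through the OpenAI-compatible endpoint, subject to the
    /// request limiter.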
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();

        let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
            let api_url = VercelLanguageModelProvider::api_url(cx);
            (state.api_key_state.key(&api_url), api_url)
        }) else {
            return future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request =
                open_ai::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for VercelLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        true
    }

    fn supports_images(&self) -> bool {
        true
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto
            | LanguageModelToolChoice::Any
            | LanguageModelToolChoice::None => true,
        }
    }

    fn telemetry_id(&self) -> String {
        format!("vercel/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        count_vercel_tokens(request, self.model.clone(), cx)
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let request = crate::provider::open_ai::into_open_ai(
            request,
            self.model.id(),
            self.model.supports_parallel_tool_calls(),
            self.model.supports_prompt_cache_key(),
            self.max_output_tokens(),
            None,
        );
        let completions = self.stream_completion(request, cx);
        async move {
            let mapper = crate::provider::open_ai::OpenAiEventMapper::new();
            Ok(mapper.map_stream(completions.await?).boxed())
        }
        .boxed()
    }
}

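/// Estimates the token count for a request. There is no Vercel-specific
/// tokenizer here, so counts are approximated with `tiktoken_rs` using an
/// OpenAI tokenizer chosen per model below.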
pub fn count_vercel_tokens(
    request: LanguageModelRequest,
    model: Model,
    cx: &App,
) -> BoxFuture<'static, Result<u64>> {
    cx.background_spawn(async move {
        let messages = request
            .messages
            .into_iter()
            .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                role: match message.role {
                    Role::User => "user".into(),
                    Role::Assistant => "assistant".into(),
                    Role::System => "system".into(),
                },
                content: Some(message.string_contents()),
                name: None,
                function_call: None,
            })
            .collect::<Vec<_>>();

        match model {
            Model::Custom { max_tokens, .. } => {
                let model = if max_tokens >= 100_000 {
                    // A context window of 100k tokens or more most likely means the
                    // o200k_base tokenizer used by GPT-4o.
                    "gpt-4o"
                } else {
                    // Otherwise fall back to gpt-4, since this tiktoken method only
                    // supports the cl100k_base and o200k_base tokenizers.
                    "gpt-4"
                };
                tiktoken_rs::num_tokens_from_messages(model, &messages)
            }
            // Map Vercel models to comparable OpenAI models for token counting,
            // since Vercel exposes an OpenAI-compatible API.
            Model::VZeroOnePointFiveMedium => {
                // Vercel v0 is similar to GPT-4o, so use gpt-4o for token counting.
                tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages)
            }
        }
        .map(|tokens| tokens as u64)
    })
    .boxed()
}

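/// The provider's settings UI: an API key input with setup instructions, or a
/// "key configured" summary with a reset button once authenticated.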
struct ConfigurationView {
    api_key_editor: Entity<SingleLineInput>,
    state: gpui::Entity<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor = cx.new(|cx| {
            SingleLineInput::new(
                window,
                cx,
                "v1:0000000000000000000000000000000000000000000000000",
            )
            .label("API key")
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
        if api_key.is_empty() {
            return;
        }

        // Clear the input so the entered key doesn't linger; URL changes can
        // cause the editor to be displayed again.
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |input, cx| input.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(None, cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();

        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's agent with Vercel v0, you need to add an API key. Follow these steps:"))
                .child(
                    List::new()
                        .child(InstructionListItem::new(
                            "Create one by visiting",
                            Some("Vercel v0's console"),
                            Some("https://v0.dev/chat/settings/keys"),
                        ))
                        .child(InstructionListItem::text_only(
                            "Paste your API key below and hit enter to start using the agent",
                        )),
                )
                .child(self.api_key_editor.clone())
                .child(
                    Label::new(format!(
                        "You can also assign the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed."
                    ))
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .child(
                    Label::new("Note that Vercel v0 is a custom OpenAI-compatible provider.")
                        .size(LabelSize::Small)
                        .color(Color::Muted),
                )
                .into_any()
        } else {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable")
                        } else {
                            let api_url = VercelLanguageModelProvider::api_url(cx);
                            if api_url == VERCEL_API_URL {
                                "API key configured".to_string()
                            } else {
                                format!("API key configured for {}", truncate_and_trailoff(&api_url, 32))
                            }
                        })),
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        };

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}