open_ai_compatible.rs

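//! Language model provider for endpoints that speak the OpenAI-compatible API.
//! Providers are keyed by id in the `openai_compatible` map of
//! `AllLanguageModelSettings` and reuse the request/response plumbing from
//! `crate::provider::open_ai`.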
use anyhow::{Context as _, Result, anyhow};
use credentials_provider::CredentialsProvider;

use convert_case::{Case, Casing};
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
use http_client::HttpClient;
use language_model::{
    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
    LanguageModelToolChoice, RateLimiter,
};
use menu;
use open_ai::{ResponseStreamEvent, stream_completion};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::sync::Arc;

use ui::{ElevationIndex, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;

use crate::AllLanguageModelSettings;
use crate::provider::open_ai::{OpenAiEventMapper, into_open_ai};

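/// Resolved settings for a single OpenAI-compatible provider: the base API URL
/// and the models the user has made available.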
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiCompatibleSettings {
    pub api_url: String,
    pub available_models: Vec<AvailableModel>,
}

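/// A user-configured model entry, including its context window, output limits,
/// and capability flags.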
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
    #[serde(default)]
    pub capabilities: ModelCapabilities,
}

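/// Capability flags for a configured model. The defaults assume tool calling is
/// supported, while image input, parallel tool calls, and prompt cache keys are
/// opt-in.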
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct ModelCapabilities {
    pub tools: bool,
    pub images: bool,
    pub parallel_tool_calls: bool,
    pub prompt_cache_key: bool,
}

impl Default for ModelCapabilities {
    fn default() -> Self {
        Self {
            tools: true,
            images: false,
            parallel_tool_calls: false,
            prompt_cache_key: false,
        }
    }
}

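/// A `LanguageModelProvider` backed by any endpoint that implements the
/// OpenAI-compatible chat completions API.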
pub struct OpenAiCompatibleLanguageModelProvider {
    id: LanguageModelProviderId,
    name: LanguageModelProviderName,
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}

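/// Per-provider state: the resolved settings, the API key (read from the
/// credentials store or an environment variable), and a subscription that keeps
/// the settings in sync with the global settings store.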
pub struct State {
    id: Arc<str>,
    env_var_name: Arc<str>,
    api_key: Option<String>,
    api_key_from_env: bool,
    settings: OpenAiCompatibleSettings,
    _subscription: Subscription,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

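    /// Deletes the credential stored for this provider's API URL and clears the
    /// in-memory key.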
    fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            credentials_provider
                .delete_credentials(&api_url, &cx)
                .await
                .log_err();
            this.update(cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

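    /// Writes the key to the credentials store under this provider's API URL and
    /// caches it in memory.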
    fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            credentials_provider
                .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
                .await
                .log_err();
            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

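    /// Loads the API key, preferring the provider-specific environment variable
    /// over the credentials store.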
    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        if self.is_authenticated() {
            return Task::ready(Ok(()));
        }

        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let env_var_name = self.env_var_name.clone();
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            let (api_key, from_env) = if let Ok(api_key) = std::env::var(env_var_name.as_ref()) {
                (api_key, true)
            } else {
                let (_, api_key) = credentials_provider
                    .read_credentials(&api_url, &cx)
                    .await?
                    .ok_or(AuthenticateError::CredentialsNotFound)?;
                (
                    String::from_utf8(api_key).context("invalid API key")?,
                    false,
                )
            };
            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                this.api_key_from_env = from_env;
                cx.notify();
            })?;

            Ok(())
        })
    }
}

impl OpenAiCompatibleLanguageModelProvider {
    pub fn new(id: Arc<str>, http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> {
            AllLanguageModelSettings::get_global(cx)
                .openai_compatible
                .get(id)
        }

        let state = cx.new(|cx| State {
            id: id.clone(),
            env_var_name: format!("{}_API_KEY", id).to_case(Case::Constant).into(),
            settings: resolve_settings(&id, cx).cloned().unwrap_or_default(),
            api_key: None,
            api_key_from_env: false,
            _subscription: cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let Some(settings) = resolve_settings(&this.id, cx) else {
                    return;
                };
                if &this.settings != settings {
                    this.settings = settings.clone();
                    cx.notify();
                }
            }),
        });

        Self {
            id: id.clone().into(),
            name: id.into(),
            http_client,
            state,
        }
    }

    fn create_language_model(&self, model: AvailableModel) -> Arc<dyn LanguageModel> {
        Arc::new(OpenAiCompatibleLanguageModel {
            id: LanguageModelId::from(model.name.clone()),
            provider_id: self.id.clone(),
            provider_name: self.name.clone(),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        })
    }
}

impl LanguageModelProviderState for OpenAiCompatibleLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiCompatibleLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelProviderName {
        self.name.clone()
    }

    fn icon(&self) -> IconName {
        IconName::AiOpenAiCompat
    }

    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .first()
            .map(|model| self.create_language_model(model.clone()))
    }

    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        None
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .iter()
            .map(|model| self.create_language_model(model.clone()))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

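/// A single model served by an OpenAI-compatible provider.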
pub struct OpenAiCompatibleLanguageModel {
    id: LanguageModelId,
    provider_id: LanguageModelProviderId,
    provider_name: LanguageModelProviderName,
    model: AvailableModel,
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl OpenAiCompatibleLanguageModel {
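    /// Reads the API key and URL from the provider state and streams raw
    /// completion events from the endpoint, gated by the request rate limiter.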
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();
        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, _| {
            (state.api_key.clone(), state.settings.api_url.clone())
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let provider = self.provider_name.clone();
        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey { provider });
            };
            let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for OpenAiCompatibleLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(
            self.model
                .display_name
                .clone()
                .unwrap_or_else(|| self.model.name.clone()),
        )
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        self.provider_id.clone()
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        self.provider_name.clone()
    }

    fn supports_tools(&self) -> bool {
        self.model.capabilities.tools
    }

    fn supports_images(&self) -> bool {
        self.model.capabilities.images
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto => self.model.capabilities.tools,
            LanguageModelToolChoice::Any => self.model.capabilities.tools,
            LanguageModelToolChoice::None => true,
        }
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.name)
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_tokens
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        let max_token_count = self.max_token_count();
        cx.background_spawn(async move {
            let messages = super::open_ai::collect_tiktoken_messages(request);
            let model = if max_token_count >= 100_000 {
                // If the max token count is 100k or more, the model likely uses the
                // o200k_base tokenizer (as in gpt-4o).
                "gpt-4o"
            } else {
                // Otherwise fall back to gpt-4, since only cl100k_base and o200k_base are
                // supported by this tiktoken method.
                "gpt-4"
            };
            tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let request = into_open_ai(
            request,
            &self.model.name,
            self.model.capabilities.parallel_tool_calls,
            self.model.capabilities.prompt_cache_key,
            self.max_output_tokens(),
            None,
        );
        let completions = self.stream_completion(request, cx);
        async move {
            let mapper = OpenAiEventMapper::new();
            Ok(mapper.map_stream(completions.await?).boxed())
        }
        .boxed()
    }
}

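/// Settings UI for entering, saving, and resetting the provider's API key.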
struct ConfigurationView {
    api_key_editor: Entity<SingleLineInput>,
    state: gpui::Entity<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor = cx.new(|cx| {
            SingleLineInput::new(
                window,
                cx,
                "000000000000000000000000000000000000000000000000000",
            )
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self
            .api_key_editor
            .read(cx)
            .editor()
            .read(cx)
            .text(cx)
            .trim()
            .to_string();

        // Don't proceed if no API key is provided and we're not authenticated
        if api_key.is_empty() && !self.state.read(cx).is_authenticated() {
            return;
        }

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor.update(cx, |input, cx| {
            input.editor.update(cx, |editor, cx| {
                editor.set_text("", window, cx);
            });
        });

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state.update(cx, |state, cx| state.reset_api_key(cx))?.await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_from_env;
        let env_var_name = self.state.read(cx).env_var_name.clone();

        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's agent with an OpenAI-compatible provider, you need to add an API key."))
                .child(
                    div()
                        .pt(DynamicSpacing::Base04.rems(cx))
                        .child(self.api_key_editor.clone())
                )
                .child(
                    Label::new(
                        format!("You can also assign the {env_var_name} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small).color(Color::Muted),
                )
                .into_any()
        } else {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {env_var_name} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {env_var_name} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        };

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}