open_ai_compatible.rs

use anyhow::Result;
use convert_case::{Case, Casing};
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
use language_model::{
    ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError,
    LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider,
    LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
    LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolSchemaFormat, RateLimiter,
};
use menu;
use open_ai::{
    ResponseStreamEvent,
    responses::{Request as ResponseRequest, StreamEvent as ResponsesStreamEvent, stream_response},
    stream_completion,
};
use settings::{Settings, SettingsStore};
use std::sync::Arc;
use ui::{ElevationIndex, Tooltip, prelude::*};
use ui_input::InputField;
use util::ResultExt;

use crate::provider::open_ai::{
    OpenAiEventMapper, OpenAiResponseEventMapper, into_open_ai, into_open_ai_response,
};
pub use settings::OpenAiCompatibleAvailableModel as AvailableModel;
pub use settings::OpenAiCompatibleModelCapabilities as ModelCapabilities;

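/// Settings for a single OpenAI-compatible provider: the endpoint URL and the
/// models it exposes.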
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiCompatibleSettings {
    pub api_url: String,
    pub available_models: Vec<AvailableModel>,
}

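/// A language model provider that talks to any OpenAI-compatible endpoint
/// configured in the user's settings.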
pub struct OpenAiCompatibleLanguageModelProvider {
    id: LanguageModelProviderId,
    name: LanguageModelProviderName,
    http_client: Arc<dyn HttpClient>,
    state: Entity<State>,
}

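/// Per-provider state: the provider's current settings plus its API key,
/// managed by `ApiKeyState`.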
pub struct State {
    id: Arc<str>,
    api_key_state: ApiKeyState,
    settings: OpenAiCompatibleSettings,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key_state.has_key()
    }

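    /// Stores the given API key (or clears it when `None`) for the currently configured API URL.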
    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
        let api_url = SharedString::new(self.settings.api_url.as_str());
        self.api_key_state
            .store(api_url, api_key, |this| &mut this.api_key_state, cx)
    }

    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let api_url = SharedString::new(self.settings.api_url.clone());
        self.api_key_state
            .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
    }
}

impl OpenAiCompatibleLanguageModelProvider {
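    /// Creates a provider for the OpenAI-compatible endpoint registered under `id` in the
    /// settings. The API key can come from the `<ID>_API_KEY` environment variable (the id
    /// upper-snake-cased), and settings changes are observed so API URL updates are picked
    /// up at runtime.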
    pub fn new(id: Arc<str>, http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> {
            crate::AllLanguageModelSettings::get_global(cx)
                .openai_compatible
                .get(id)
        }

        let api_key_env_var_name = format!("{}_API_KEY", id).to_case(Case::UpperSnake).into();
        let state = cx.new(|cx| {
            cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let Some(settings) = resolve_settings(&this.id, cx).cloned() else {
                    return;
                };
                if this.settings != settings {
                    let api_url = SharedString::new(settings.api_url.as_str());
                    this.api_key_state.handle_url_change(
                        api_url,
                        |this| &mut this.api_key_state,
                        cx,
                    );
                    this.settings = settings;
                    cx.notify();
                }
            })
            .detach();
            let settings = resolve_settings(&id, cx).cloned().unwrap_or_default();
            State {
                id: id.clone(),
                api_key_state: ApiKeyState::new(
                    SharedString::new(settings.api_url.as_str()),
                    EnvVar::new(api_key_env_var_name),
                ),
                settings,
            }
        });

        Self {
            id: id.clone().into(),
            name: id.into(),
            http_client,
            state,
        }
    }

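    /// Wraps one configured model in a `LanguageModel` implementation backed by this
    /// provider's shared state and HTTP client.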
    fn create_language_model(&self, model: AvailableModel) -> Arc<dyn LanguageModel> {
        Arc::new(OpenAiCompatibleLanguageModel {
            id: LanguageModelId::from(model.name.clone()),
            provider_id: self.id.clone(),
            provider_name: self.name.clone(),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        })
    }
}

impl LanguageModelProviderState for OpenAiCompatibleLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiCompatibleLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelProviderName {
        self.name.clone()
    }

    fn icon(&self) -> IconOrSvg {
        IconOrSvg::Icon(IconName::AiOpenAiCompat)
    }

    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .first()
            .map(|model| self.create_language_model(model.clone()))
    }

    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        None
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .iter()
            .map(|model| self.create_language_model(model.clone()))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(
        &self,
        _target_agent: language_model::ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state
            .update(cx, |state, cx| state.set_api_key(None, cx))
    }
}

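/// One model exposed by an OpenAI-compatible endpoint, together with the shared
/// provider state needed to issue requests.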
pub struct OpenAiCompatibleLanguageModel {
    id: LanguageModelId,
    provider_id: LanguageModelProviderId,
    provider_name: LanguageModelProviderName,
    model: AvailableModel,
    state: Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl OpenAiCompatibleLanguageModel {
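    /// Streams a Chat Completions request against the configured endpoint, erroring early
    /// if no API key is available.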
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>,
            LanguageModelCompletionError,
        >,
    > {
        let http_client = self.http_client.clone();

        let (api_key, api_url) = self.state.read_with(cx, |state, _cx| {
            let api_url = &state.settings.api_url;
            (
                state.api_key_state.key(api_url),
                state.settings.api_url.clone(),
            )
        });

        let provider = self.provider_name.clone();
        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey { provider });
            };
            let request = stream_completion(
                http_client.as_ref(),
                provider.0.as_str(),
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

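    /// Streams a Responses API request against the configured endpoint, erroring early
    /// if no API key is available.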
    fn stream_response(
        &self,
        request: ResponseRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponsesStreamEvent>>>>
    {
        let http_client = self.http_client.clone();

        let (api_key, api_url) = self.state.read_with(cx, |state, _cx| {
            let api_url = &state.settings.api_url;
            (
                state.api_key_state.key(api_url),
                state.settings.api_url.clone(),
            )
        });

        let provider = self.provider_name.clone();
        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey { provider });
            };
            let request = stream_response(
                http_client.as_ref(),
                provider.0.as_str(),
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for OpenAiCompatibleLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(
            self.model
                .display_name
                .clone()
                .unwrap_or_else(|| self.model.name.clone()),
        )
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        self.provider_id.clone()
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        self.provider_name.clone()
    }

    fn supports_tools(&self) -> bool {
        self.model.capabilities.tools
    }

    fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
        LanguageModelToolSchemaFormat::JsonSchemaSubset
    }

    fn supports_images(&self) -> bool {
        self.model.capabilities.images
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto => self.model.capabilities.tools,
            LanguageModelToolChoice::Any => self.model.capabilities.tools,
            LanguageModelToolChoice::None => true,
        }
    }

    fn supports_split_token_display(&self) -> bool {
        true
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.name)
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_tokens
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens
    }

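    /// Estimates the token count with tiktoken, picking a tokenizer by heuristic since the
    /// remote model's actual tokenizer is unknown.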
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        let max_token_count = self.max_token_count();
        cx.background_spawn(async move {
            let messages = super::open_ai::collect_tiktoken_messages(request);
            let model = if max_token_count >= 100_000 {
                // If the max tokens is 100k or more, it is likely the o200k_base tokenizer from gpt-4o
                "gpt-4o"
            } else {
                // Otherwise fall back to gpt-4, since only cl100k_base and o200k_base are
                // supported with this tiktoken method
                "gpt-4"
            };
            tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }

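    /// Routes the request through the Chat Completions API when the model advertises that
    /// capability, and through the Responses API otherwise, mapping either stream into
    /// `LanguageModelCompletionEvent`s.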
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        if self.model.capabilities.chat_completions {
            let request = into_open_ai(
                request,
                &self.model.name,
                self.model.capabilities.parallel_tool_calls,
                self.model.capabilities.prompt_cache_key,
                self.max_output_tokens(),
                None,
            );
            let completions = self.stream_completion(request, cx);
            async move {
                let mapper = OpenAiEventMapper::new();
                Ok(mapper.map_stream(completions.await?).boxed())
            }
            .boxed()
        } else {
            let request = into_open_ai_response(
                request,
                &self.model.name,
                self.model.capabilities.parallel_tool_calls,
                self.model.capabilities.prompt_cache_key,
                self.max_output_tokens(),
                None,
            );
            let completions = self.stream_response(request, cx);
            async move {
                let mapper = OpenAiResponseEventMapper::new();
                Ok(mapper.map_stream(completions.await?).boxed())
            }
            .boxed()
        }
    }
}

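/// Configuration UI for entering, resetting, and displaying the status of the
/// provider's API key.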
struct ConfigurationView {
    api_key_editor: Entity<InputField>,
    state: Entity<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor = cx.new(|cx| {
            InputField::new(
                window,
                cx,
                "000000000000000000000000000000000000000000000000000",
            )
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            async move |this, cx| {
                // We don't log an error, because "not signed in" is also an error.
                let _ = state.update(cx, |state, cx| state.authenticate(cx)).await;
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

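    /// Saves the API key currently entered in the editor, ignoring empty input.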
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
        if api_key.is_empty() {
            return;
        }

        // url changes can cause the editor to be displayed again
        self.api_key_editor
            .update(cx, |input, cx| input.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
                .await
        })
        .detach_and_log_err(cx);
    }

    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |input, cx| input.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(None, cx))
                .await
        })
        .detach_and_log_err(cx);
    }

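    /// The API key editor is only shown while the provider is unauthenticated.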
    fn should_render_editor(&self, cx: &Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let state = self.state.read(cx);
        let env_var_set = state.api_key_state.is_from_env_var();
        let env_var_name = state.api_key_state.env_var_name();

        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's agent with an OpenAI-compatible provider, you need to add an API key."))
                .child(
                    div()
                        .pt(DynamicSpacing::Base04.rems(cx))
                        .child(self.api_key_editor.clone())
                )
                .child(
                    Label::new(
                        format!("You can also set the {env_var_name} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small).color(Color::Muted),
                )
                .into_any()
        } else {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .flex_1()
                        .min_w_0()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(
                            div()
                                .w_full()
                                .overflow_x_hidden()
                                .text_ellipsis()
                                .child(Label::new(
                                    if env_var_set {
                                        format!("API key set in {env_var_name} environment variable")
                                    } else {
                                        format!("API key configured for {}", &state.settings.api_url)
                                    }
                                ))
                        ),
                )
                .child(
                    h_flex()
                        .flex_shrink_0()
                        .child(
                            Button::new("reset-api-key", "Reset API Key")
                                .label_size(LabelSize::Small)
                                .icon(IconName::Undo)
                                .icon_size(IconSize::Small)
                                .icon_position(IconPosition::Start)
                                .layer(ElevationIndex::ModalSurface)
                                .when(env_var_set, |this| {
                                    this.tooltip(Tooltip::text(format!("To reset your API key, unset the {env_var_name} environment variable.")))
                                })
                                .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                        ),
                )
                .into_any()
        };

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}