//! open_ai.rs — OpenAI language model provider.

  1use anyhow::{anyhow, Result};
  2use collections::BTreeMap;
  3use editor::{Editor, EditorElement, EditorStyle};
  4use futures::{future::BoxFuture, FutureExt, StreamExt};
  5use gpui::{
  6    AnyView, AppContext, AsyncAppContext, FontStyle, ModelContext, Subscription, Task, TextStyle,
  7    View, WhiteSpace,
  8};
  9use http_client::HttpClient;
 10use open_ai::{
 11    stream_completion, FunctionDefinition, ResponseStreamEvent, ToolChoice, ToolDefinition,
 12};
 13use schemars::JsonSchema;
 14use serde::{Deserialize, Serialize};
 15use settings::{Settings, SettingsStore};
 16use std::{sync::Arc, time::Duration};
 17use strum::IntoEnumIterator;
 18use theme::ThemeSettings;
 19use ui::{prelude::*, Icon, IconName, Tooltip};
 20use util::ResultExt;
 21
 22use crate::{
 23    settings::AllLanguageModelSettings, LanguageModel, LanguageModelId, LanguageModelName,
 24    LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
 25    LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
 26};
 27
/// Identifier this provider registers under; must stay stable across releases.
const PROVIDER_ID: &str = "openai";
/// Human-readable provider name shown in the UI.
const PROVIDER_NAME: &str = "OpenAI";
 30
/// User-facing settings for the OpenAI provider, read from the global
/// `AllLanguageModelSettings`.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiSettings {
    // Base URL of the OpenAI-compatible API endpoint; also used as the
    // credential-store key (see `State::set_api_key`).
    pub api_url: String,
    // Optional low-speed timeout forwarded to `stream_completion`.
    pub low_speed_timeout: Option<Duration>,
    // Additional models declared in settings; these override built-ins
    // with the same name in `provided_models`.
    pub available_models: Vec<AvailableModel>,
    // Presumably set when settings were written in an older schema and
    // need migrating — TODO confirm against the settings loader.
    pub needs_setting_migration: bool,
}
 38
/// A custom model entry declared in user settings; converted into
/// `open_ai::Model::Custom` when building the provider's model list.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    // Model name sent as the model id on the wire.
    pub name: String,
    // Context window size in tokens.
    pub max_tokens: usize,
    // Optional cap on generated tokens per request.
    pub max_output_tokens: Option<u32>,
}
 45
/// Language-model provider for OpenAI; owns the HTTP client and the shared
/// credential state model handed to every `OpenAiLanguageModel` it creates.
pub struct OpenAiLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Model<State>,
}
 50
/// Credential state shared between the provider, its models, and the
/// configuration view.
pub struct State {
    // The API key, once loaded from the env var or the credential store.
    api_key: Option<String>,
    // True when the key came from OPENAI_API_KEY rather than stored
    // credentials; disables the "reset key" UI.
    api_key_from_env: bool,
    // Keeps the settings-store observer alive for the model's lifetime.
    _subscription: Subscription,
}
 56
 57const OPENAI_API_KEY_VAR: &'static str = "OPENAI_API_KEY";
 58
impl State {
    /// True once an API key is available, whatever its source.
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    /// Deletes the stored credential for the configured API URL, then clears
    /// the in-memory key and notifies observers.
    fn reset_api_key(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let delete_credentials = cx.delete_credentials(&settings.api_url);
        cx.spawn(|this, mut cx| async move {
            // Best effort: a failed delete is logged, not propagated — the
            // in-memory key is cleared regardless.
            delete_credentials.await.log_err();
            this.update(&mut cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

    /// Persists `api_key` in the credential store (keyed by the API URL),
    /// then caches it in memory. Write failures propagate and leave the
    /// in-memory key untouched.
    fn set_api_key(&mut self, api_key: String, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let write_credentials =
            cx.write_credentials(&settings.api_url, "Bearer", api_key.as_bytes());

        cx.spawn(|this, mut cx| async move {
            write_credentials.await?;
            this.update(&mut cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

    /// Loads an API key, preferring the OPENAI_API_KEY environment variable
    /// and falling back to the credential store for the configured API URL.
    /// No-op when a key is already loaded.
    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_authenticated() {
            Task::ready(Ok(()))
        } else {
            let api_url = AllLanguageModelSettings::get_global(cx)
                .openai
                .api_url
                .clone();
            cx.spawn(|this, mut cx| async move {
                let (api_key, from_env) = if let Ok(api_key) = std::env::var(OPENAI_API_KEY_VAR) {
                    (api_key, true)
                } else {
                    // Credentials come back as (username, secret); only the
                    // secret bytes are used, and they must be valid UTF-8.
                    let (_, api_key) = cx
                        .update(|cx| cx.read_credentials(&api_url))?
                        .await?
                        .ok_or_else(|| anyhow!("credentials not found"))?;
                    (String::from_utf8(api_key)?, false)
                };
                this.update(&mut cx, |this, cx| {
                    this.api_key = Some(api_key);
                    this.api_key_from_env = from_env;
                    cx.notify();
                })
            })
        }
    }
}
118
119impl OpenAiLanguageModelProvider {
120    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Self {
121        let state = cx.new_model(|cx| State {
122            api_key: None,
123            api_key_from_env: false,
124            _subscription: cx.observe_global::<SettingsStore>(|_this: &mut State, cx| {
125                cx.notify();
126            }),
127        });
128
129        Self { http_client, state }
130    }
131}
132
133impl LanguageModelProviderState for OpenAiLanguageModelProvider {
134    type ObservableEntity = State;
135
136    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
137        Some(self.state.clone())
138    }
139}
140
141impl LanguageModelProvider for OpenAiLanguageModelProvider {
142    fn id(&self) -> LanguageModelProviderId {
143        LanguageModelProviderId(PROVIDER_ID.into())
144    }
145
146    fn name(&self) -> LanguageModelProviderName {
147        LanguageModelProviderName(PROVIDER_NAME.into())
148    }
149
150    fn icon(&self) -> IconName {
151        IconName::AiOpenAi
152    }
153
154    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
155        let mut models = BTreeMap::default();
156
157        // Add base models from open_ai::Model::iter()
158        for model in open_ai::Model::iter() {
159            if !matches!(model, open_ai::Model::Custom { .. }) {
160                models.insert(model.id().to_string(), model);
161            }
162        }
163
164        // Override with available models from settings
165        for model in &AllLanguageModelSettings::get_global(cx)
166            .openai
167            .available_models
168        {
169            models.insert(
170                model.name.clone(),
171                open_ai::Model::Custom {
172                    name: model.name.clone(),
173                    max_tokens: model.max_tokens,
174                    max_output_tokens: model.max_output_tokens,
175                },
176            );
177        }
178
179        models
180            .into_values()
181            .map(|model| {
182                Arc::new(OpenAiLanguageModel {
183                    id: LanguageModelId::from(model.id().to_string()),
184                    model,
185                    state: self.state.clone(),
186                    http_client: self.http_client.clone(),
187                    request_limiter: RateLimiter::new(4),
188                }) as Arc<dyn LanguageModel>
189            })
190            .collect()
191    }
192
193    fn is_authenticated(&self, cx: &AppContext) -> bool {
194        self.state.read(cx).is_authenticated()
195    }
196
197    fn authenticate(&self, cx: &mut AppContext) -> Task<Result<()>> {
198        self.state.update(cx, |state, cx| state.authenticate(cx))
199    }
200
201    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
202        cx.new_view(|cx| ConfigurationView::new(self.state.clone(), cx))
203            .into()
204    }
205
206    fn reset_credentials(&self, cx: &mut AppContext) -> Task<Result<()>> {
207        self.state.update(cx, |state, cx| state.reset_api_key(cx))
208    }
209}
210
/// A single OpenAI model instance handed out by the provider; shares the
/// provider's credential state and HTTP client.
pub struct OpenAiLanguageModel {
    id: LanguageModelId,
    model: open_ai::Model,
    state: gpui::Model<State>,
    http_client: Arc<dyn HttpClient>,
    // Caps concurrent requests for this model (created with a limit of 4).
    request_limiter: RateLimiter,
}
218
219impl OpenAiLanguageModel {
220    fn stream_completion(
221        &self,
222        request: open_ai::Request,
223        cx: &AsyncAppContext,
224    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
225    {
226        let http_client = self.http_client.clone();
227        let Ok((api_key, api_url, low_speed_timeout)) = cx.read_model(&self.state, |state, cx| {
228            let settings = &AllLanguageModelSettings::get_global(cx).openai;
229            (
230                state.api_key.clone(),
231                settings.api_url.clone(),
232                settings.low_speed_timeout,
233            )
234        }) else {
235            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
236        };
237
238        let future = self.request_limiter.stream(async move {
239            let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?;
240            let request = stream_completion(
241                http_client.as_ref(),
242                &api_url,
243                &api_key,
244                request,
245                low_speed_timeout,
246            );
247            let response = request.await?;
248            Ok(response)
249        });
250
251        async move { Ok(future.await?.boxed()) }.boxed()
252    }
253}
254
impl LanguageModel for OpenAiLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    /// Stable identifier for telemetry, e.g. "openai/<model-id>".
    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.id())
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u32> {
        self.model.max_output_tokens()
    }

    /// Delegates token counting to the tiktoken-based helper below.
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        count_open_ai_tokens(request, self.model.clone(), cx)
    }

    /// Streams plain-text chunks: converts the generic request into the
    /// OpenAI wire format and extracts text deltas from the event stream.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
        let request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
        let completions = self.stream_completion(request, cx);
        async move { Ok(open_ai::extract_text_from_events(completions.await?).boxed()) }.boxed()
    }

    /// Forces the model to call the single tool described by `tool_name` /
    /// `schema`, streaming back the raw JSON argument text.
    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        schema: serde_json::Value,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
        let mut request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
        // `tool_choice` pins the model to this one function; the full
        // definition (description + schema) is supplied via `tools`.
        request.tool_choice = Some(ToolChoice::Other(ToolDefinition::Function {
            function: FunctionDefinition {
                name: tool_name.clone(),
                description: None,
                parameters: None,
            },
        }));
        request.tools = vec![ToolDefinition::Function {
            function: FunctionDefinition {
                name: tool_name.clone(),
                description: Some(tool_description),
                parameters: Some(schema),
            },
        }];

        let response = self.stream_completion(request, cx);
        // NOTE(review): the inherent `stream_completion` above already goes
        // through `request_limiter.stream`; wrapping in `request_limiter.run`
        // here appears to acquire the limiter twice — confirm intended.
        self.request_limiter
            .run(async move {
                let response = response.await?;
                Ok(
                    open_ai::extract_tool_args_from_events(tool_name, Box::pin(response))
                        .await?
                        .boxed(),
                )
            })
            .boxed()
    }
}
339
340pub fn count_open_ai_tokens(
341    request: LanguageModelRequest,
342    model: open_ai::Model,
343    cx: &AppContext,
344) -> BoxFuture<'static, Result<usize>> {
345    cx.background_executor()
346        .spawn(async move {
347            let messages = request
348                .messages
349                .into_iter()
350                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
351                    role: match message.role {
352                        Role::User => "user".into(),
353                        Role::Assistant => "assistant".into(),
354                        Role::System => "system".into(),
355                    },
356                    content: Some(message.string_contents()),
357                    name: None,
358                    function_call: None,
359                })
360                .collect::<Vec<_>>();
361
362            if let open_ai::Model::Custom { .. } = model {
363                tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
364            } else {
365                tiktoken_rs::num_tokens_from_messages(model.id(), &messages)
366            }
367        })
368        .boxed()
369}
370
/// View that prompts for an API key (or shows the configured state) in the
/// provider's configuration panel.
struct ConfigurationView {
    api_key_editor: View<Editor>,
    state: gpui::Model<State>,
    // Some while credentials are being loaded; render shows a loading
    // message until it is cleared.
    load_credentials_task: Option<Task<()>>,
}
376
impl ConfigurationView {
    /// Builds the view: a single-line key editor, an observer on the shared
    /// credential state, and a background task that attempts authentication.
    fn new(state: gpui::Model<State>, cx: &mut ViewContext<Self>) -> Self {
        let api_key_editor = cx.new_view(|cx| {
            let mut editor = Editor::single_line(cx);
            editor.set_placeholder_text("sk-000000000000000000000000000000000000000000000000", cx);
            editor
        });

        // Re-render whenever the credential state changes.
        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            |this, mut cx| async move {
                if let Some(task) = state
                    .update(&mut cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }

                // Clear the loading indicator whether or not auth succeeded.
                this.update(&mut cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    /// Confirm handler: persists the key currently typed into the editor.
    /// Empty input is ignored.
    fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    /// Clears the editor and deletes the stored credential.
    fn reset_api_key(&mut self, cx: &mut ViewContext<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", cx));

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.reset_api_key(cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    /// Renders the key editor styled to match the current UI font/theme.
    fn render_api_key_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            background_color: None,
            underline: None,
            strikethrough: None,
            white_space: WhiteSpace::Normal,
            truncate: None,
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    /// The key-entry editor is shown only while unauthenticated.
    fn should_render_editor(&self, cx: &mut ViewContext<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}
480
impl Render for ConfigurationView {
    /// Three states: loading credentials, prompting for a key, or showing
    /// the configured state with a reset button.
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const INSTRUCTIONS: [&str; 6] = [
            "To use the assistant panel or inline assistant, you need to add your OpenAI API key.",
            " - You can create an API key at: platform.openai.com/api-keys",
            " - Make sure your OpenAI account has credits",
            " - Having a subscription for another service like GitHub Copilot won't work.",
            "",
            "Paste your OpenAI API key below and hit enter to use the assistant:",
        ];

        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            // Unauthenticated: instructions + key editor + env-var hint.
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .children(
                    INSTRUCTIONS.map(|instruction| Label::new(instruction)),
                )
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .rounded_md()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {OPENAI_API_KEY_VAR} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small),
                )
                .into_any()
        } else {
            // Authenticated: show key source; the reset button is disabled
            // when the key comes from the environment (it can't be reset here).
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {OPENAI_API_KEY_VAR} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(|cx| Tooltip::text(format!("To reset your API key, unset the {OPENAI_API_KEY_VAR} environment variable."), cx))
                        })
                        .on_click(cx.listener(|this, _, cx| this.reset_api_key(cx))),
                )
                .into_any()
        }
    }
}
548}