open_ai.rs

use anyhow::{anyhow, Result};
use collections::BTreeMap;
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::{
    AnyView, AppContext, AsyncAppContext, FontStyle, ModelContext, Subscription, Task, TextStyle,
    View, WhiteSpace,
};
use http_client::HttpClient;
use open_ai::{
    stream_completion, FunctionDefinition, ResponseStreamEvent, ToolChoice, ToolDefinition,
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::{sync::Arc, time::Duration};
use strum::IntoEnumIterator;
use theme::ThemeSettings;
use ui::{prelude::*, Icon, IconName, Tooltip};
use util::ResultExt;

use crate::{
    settings::AllLanguageModelSettings, LanguageModel, LanguageModelId, LanguageModelName,
    LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
};

const PROVIDER_ID: &str = "openai";
const PROVIDER_NAME: &str = "OpenAI";

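/// Settings for the OpenAI provider, populated from the user's language model settings
/// (API URL, low-speed timeout, and any additional models).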
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiSettings {
    pub api_url: String,
    pub low_speed_timeout: Option<Duration>,
    pub available_models: Vec<AvailableModel>,
    pub needs_setting_migration: bool,
}

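/// A model exposed through the `available_models` setting, in addition to the built-in models.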
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub max_tokens: usize,
}

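/// The OpenAI [`LanguageModelProvider`]: exposes the available OpenAI models and
/// owns the shared authentication [`State`].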
pub struct OpenAiLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Model<State>,
}

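/// Authentication state shared by the provider, its models, and the configuration view.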
pub struct State {
    api_key: Option<String>,
    api_key_from_env: bool,
    _subscription: Subscription,
}

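/// Environment variable consulted before falling back to the system credential store.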
const OPENAI_API_KEY_VAR: &str = "OPENAI_API_KEY";

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    fn reset_api_key(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let delete_credentials = cx.delete_credentials(&settings.api_url);
        cx.spawn(|this, mut cx| async move {
            delete_credentials.await.log_err();
            this.update(&mut cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

    fn set_api_key(&mut self, api_key: String, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let write_credentials =
            cx.write_credentials(&settings.api_url, "Bearer", api_key.as_bytes());

        cx.spawn(|this, mut cx| async move {
            write_credentials.await?;
            this.update(&mut cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_authenticated() {
            Task::ready(Ok(()))
        } else {
            let api_url = AllLanguageModelSettings::get_global(cx)
                .openai
                .api_url
                .clone();
            cx.spawn(|this, mut cx| async move {
                // The environment variable takes precedence over any key stored
                // in the system credential store.
                let (api_key, from_env) = if let Ok(api_key) = std::env::var(OPENAI_API_KEY_VAR) {
                    (api_key, true)
                } else {
                    let (_, api_key) = cx
                        .update(|cx| cx.read_credentials(&api_url))?
                        .await?
                        .ok_or_else(|| anyhow!("credentials not found"))?;
                    (String::from_utf8(api_key)?, false)
                };
                this.update(&mut cx, |this, cx| {
                    this.api_key = Some(api_key);
                    this.api_key_from_env = from_env;
                    cx.notify();
                })
            })
        }
    }
}

impl OpenAiLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Self {
        let state = cx.new_model(|cx| State {
            api_key: None,
            api_key_from_env: false,
            _subscription: cx.observe_global::<SettingsStore>(|_this: &mut State, cx| {
                cx.notify();
            }),
        });

        Self { http_client, state }
    }
}

impl LanguageModelProviderState for OpenAiLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiOpenAi
    }

    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        // Add base models from open_ai::Model::iter()
        for model in open_ai::Model::iter() {
            if !matches!(model, open_ai::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        // Override with available models from settings
        for model in &AllLanguageModelSettings::get_global(cx)
            .openai
            .available_models
        {
            models.insert(
                model.name.clone(),
                open_ai::Model::Custom {
                    name: model.name.clone(),
                    max_tokens: model.max_tokens,
                },
            );
        }

        models
            .into_values()
            .map(|model| {
                Arc::new(OpenAiLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    state: self.state.clone(),
                    http_client: self.http_client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &AppContext) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
        cx.new_view(|cx| ConfigurationView::new(self.state.clone(), cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

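/// A single OpenAI model exposed to the rest of the app; its requests are throttled
/// through the per-model `request_limiter`.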
pub struct OpenAiLanguageModel {
    id: LanguageModelId,
    model: open_ai::Model,
    state: gpui::Model<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl OpenAiLanguageModel {
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();
        let Ok((api_key, api_url, low_speed_timeout)) = cx.read_model(&self.state, |state, cx| {
            let settings = &AllLanguageModelSettings::get_global(cx).openai;
            (
                state.api_key.clone(),
                settings.api_url.clone(),
                settings.low_speed_timeout,
            )
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?;
            let request = stream_completion(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
                low_speed_timeout,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for OpenAiLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.id())
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        count_open_ai_tokens(request, self.model.clone(), cx)
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
        let request = request.into_open_ai(self.model.id().into());
        let completions = self.stream_completion(request, cx);
        async move { Ok(open_ai::extract_text_from_events(completions.await?).boxed()) }.boxed()
    }

    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        schema: serde_json::Value,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
        let mut request = request.into_open_ai(self.model.id().into());
        // Force the model to call the single tool provided below.
        request.tool_choice = Some(ToolChoice::Other(ToolDefinition::Function {
            function: FunctionDefinition {
                name: tool_name.clone(),
                description: None,
                parameters: None,
            },
        }));
        request.tools = vec![ToolDefinition::Function {
            function: FunctionDefinition {
                name: tool_name.clone(),
                description: Some(tool_description),
                parameters: Some(schema),
            },
        }];

        let response = self.stream_completion(request, cx);
        self.request_limiter
            .run(async move {
                let response = response.await?;
                Ok(
                    open_ai::extract_tool_args_from_events(tool_name, Box::pin(response))
                        .await?
                        .boxed(),
                )
            })
            .boxed()
    }
}

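/// Estimates the token count of a request using tiktoken on the background executor.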
pub fn count_open_ai_tokens(
    request: LanguageModelRequest,
    model: open_ai::Model,
    cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
    cx.background_executor()
        .spawn(async move {
            let messages = request
                .messages
                .into_iter()
                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(message.string_contents()),
                    name: None,
                    function_call: None,
                })
                .collect::<Vec<_>>();

            if let open_ai::Model::Custom { .. } = model {
                // Tiktoken doesn't know about custom models, so approximate
                // with the gpt-4 tokenizer.
                tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
            } else {
                tiktoken_rs::num_tokens_from_messages(model.id(), &messages)
            }
        })
        .boxed()
}

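/// The configuration view where the user enters, saves, or resets their OpenAI API key.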
struct ConfigurationView {
    api_key_editor: View<Editor>,
    state: gpui::Model<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Model<State>, cx: &mut ViewContext<Self>) -> Self {
        let api_key_editor = cx.new_view(|cx| {
            let mut editor = Editor::single_line(cx);
            editor.set_placeholder_text("sk-000000000000000000000000000000000000000000000000", cx);
            editor
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            |this, mut cx| async move {
                if let Some(task) = state
                    .update(&mut cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log this error, because "not signed in" is reported
                    // as an error even though it is an expected state here.
                    let _ = task.await;
                }

                this.update(&mut cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn reset_api_key(&mut self, cx: &mut ViewContext<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", cx));

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.reset_api_key(cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn render_api_key_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            background_color: None,
            underline: None,
            strikethrough: None,
            white_space: WhiteSpace::Normal,
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    fn should_render_editor(&self, cx: &mut ViewContext<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const INSTRUCTIONS: [&str; 6] = [
            "To use the assistant panel or inline assistant, you need to add your OpenAI API key.",
            " - You can create an API key at: platform.openai.com/api-keys",
            " - Make sure your OpenAI account has credits",
            " - Having a subscription for another service like GitHub Copilot won't work.",
            "",
            "Paste your OpenAI API key below and hit enter to use the assistant:",
        ];

        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .children(
                    INSTRUCTIONS.map(|instruction| Label::new(instruction)),
                )
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .rounded_md()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {OPENAI_API_KEY_VAR} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small),
                )
                .into_any()
        } else {
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {OPENAI_API_KEY_VAR} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(|cx| Tooltip::text(format!("To reset your API key, unset the {OPENAI_API_KEY_VAR} environment variable."), cx))
                        })
                        .on_click(cx.listener(|this, _, cx| this.reset_api_key(cx))),
                )
                .into_any()
        }
    }
}