open_ai.rs

use anyhow::{anyhow, Result};
use collections::BTreeMap;
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::{
    AnyView, AppContext, AsyncAppContext, FontStyle, ModelContext, Subscription, Task, TextStyle,
    View, WhiteSpace,
};
use http_client::HttpClient;
use open_ai::{
    stream_completion, FunctionDefinition, ResponseStreamEvent, ToolChoice, ToolDefinition,
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::{sync::Arc, time::Duration};
use strum::IntoEnumIterator;
use theme::ThemeSettings;
use ui::{prelude::*, Icon, IconName, Tooltip};
use util::ResultExt;

use crate::LanguageModelCompletionEvent;
use crate::{
    settings::AllLanguageModelSettings, LanguageModel, LanguageModelId, LanguageModelName,
    LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
};

const PROVIDER_ID: &str = "openai";
const PROVIDER_NAME: &str = "OpenAI";

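/// User-facing settings for the OpenAI provider: endpoint URL, request timeout,
/// and any additional models declared in settings.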
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiSettings {
    pub api_url: String,
    pub low_speed_timeout: Option<Duration>,
    pub available_models: Vec<AvailableModel>,
    pub needs_setting_migration: bool,
}

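/// A model entry declared in settings; these are merged into (and can override)
/// the built-in model list in `provided_models`.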
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: usize,
    pub max_output_tokens: Option<u32>,
}

pub struct OpenAiLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Model<State>,
}

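/// Shared provider state: the API key, whether it came from the environment,
/// and a subscription that notifies observers when global settings change.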
pub struct State {
    api_key: Option<String>,
    api_key_from_env: bool,
    _subscription: Subscription,
}

const OPENAI_API_KEY_VAR: &str = "OPENAI_API_KEY";

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    fn reset_api_key(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let delete_credentials = cx.delete_credentials(&settings.api_url);
        cx.spawn(|this, mut cx| async move {
            delete_credentials.await.log_err();
            this.update(&mut cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

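    /// Writes the key to the credential store via `cx.write_credentials`, keyed
    /// by the configured API URL, then caches it on `State`.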
    fn set_api_key(&mut self, api_key: String, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let write_credentials =
            cx.write_credentials(&settings.api_url, "Bearer", api_key.as_bytes());

        cx.spawn(|this, mut cx| async move {
            write_credentials.await?;
            this.update(&mut cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

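    /// Loads the API key if one is not already set: the `OPENAI_API_KEY`
    /// environment variable takes precedence; otherwise the key is read from
    /// the stored credentials for the configured API URL.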
    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_authenticated() {
            Task::ready(Ok(()))
        } else {
            let api_url = AllLanguageModelSettings::get_global(cx)
                .openai
                .api_url
                .clone();
            cx.spawn(|this, mut cx| async move {
                let (api_key, from_env) = if let Ok(api_key) = std::env::var(OPENAI_API_KEY_VAR) {
                    (api_key, true)
                } else {
                    let (_, api_key) = cx
                        .update(|cx| cx.read_credentials(&api_url))?
                        .await?
                        .ok_or_else(|| anyhow!("credentials not found"))?;
                    (String::from_utf8(api_key)?, false)
                };
                this.update(&mut cx, |this, cx| {
                    this.api_key = Some(api_key);
                    this.api_key_from_env = from_env;
                    cx.notify();
                })
            })
        }
    }
}

impl OpenAiLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Self {
        let state = cx.new_model(|cx| State {
            api_key: None,
            api_key_from_env: false,
            _subscription: cx.observe_global::<SettingsStore>(|_this: &mut State, cx| {
                cx.notify();
            }),
        });

        Self { http_client, state }
    }
}

impl LanguageModelProviderState for OpenAiLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiOpenAi
    }

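    /// Combines the built-in `open_ai::Model` variants with custom models from
    /// settings; a settings entry whose name matches a built-in model id
    /// replaces that model.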
    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        // Add base models from open_ai::Model::iter()
        for model in open_ai::Model::iter() {
            if !matches!(model, open_ai::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        // Override with available models from settings
        for model in &AllLanguageModelSettings::get_global(cx)
            .openai
            .available_models
        {
            models.insert(
                model.name.clone(),
                open_ai::Model::Custom {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                    max_output_tokens: model.max_output_tokens,
                },
            );
        }

        models
            .into_values()
            .map(|model| {
                Arc::new(OpenAiLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    state: self.state.clone(),
                    http_client: self.http_client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &AppContext) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
        cx.new_view(|cx| ConfigurationView::new(self.state.clone(), cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

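/// One OpenAI model exposed through the provider. Completion requests read the
/// API key from the shared `State` and are gated by `request_limiter`.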
pub struct OpenAiLanguageModel {
    id: LanguageModelId,
    model: open_ai::Model,
    state: gpui::Model<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl OpenAiLanguageModel {
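    /// Reads the API key and endpoint settings from `State`, then streams raw
    /// completion events from the configured OpenAI-compatible endpoint,
    /// issuing the request through `request_limiter`.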
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();
        let Ok((api_key, api_url, low_speed_timeout)) = cx.read_model(&self.state, |state, cx| {
            let settings = &AllLanguageModelSettings::get_global(cx).openai;
            (
                state.api_key.clone(),
                settings.api_url.clone(),
                settings.low_speed_timeout,
            )
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?;
            let request = stream_completion(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
                low_speed_timeout,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for OpenAiLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.id())
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u32> {
        self.model.max_output_tokens()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        count_open_ai_tokens(request, self.model.clone(), cx)
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncAppContext,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<LanguageModelCompletionEvent>>>,
    > {
        let request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
        let completions = self.stream_completion(request, cx);
        async move {
            Ok(open_ai::extract_text_from_events(completions.await?)
                .map(|result| result.map(LanguageModelCompletionEvent::Text))
                .boxed())
        }
        .boxed()
    }

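    /// Forces the model to call the single provided tool by listing it in
    /// `tools` and pinning `tool_choice` to it, then streams the tool-call
    /// arguments back as strings.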
    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        schema: serde_json::Value,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
        let mut request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
        request.tool_choice = Some(ToolChoice::Other(ToolDefinition::Function {
            function: FunctionDefinition {
                name: tool_name.clone(),
                description: None,
                parameters: None,
            },
        }));
        request.tools = vec![ToolDefinition::Function {
            function: FunctionDefinition {
                name: tool_name.clone(),
                description: Some(tool_description),
                parameters: Some(schema),
            },
        }];

        let response = self.stream_completion(request, cx);
        self.request_limiter
            .run(async move {
                let response = response.await?;
                Ok(
                    open_ai::extract_tool_args_from_events(tool_name, Box::pin(response))
                        .await?
                        .boxed(),
                )
            })
            .boxed()
    }
}

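/// Counts request tokens with tiktoken-rs on the background executor; custom
/// models are counted with the gpt-4 tokenizer.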
pub fn count_open_ai_tokens(
    request: LanguageModelRequest,
    model: open_ai::Model,
    cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
    cx.background_executor()
        .spawn(async move {
            let messages = request
                .messages
                .into_iter()
                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(message.string_contents()),
                    name: None,
                    function_call: None,
                })
                .collect::<Vec<_>>();

            if let open_ai::Model::Custom { .. } = model {
                tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
            } else {
                tiktoken_rs::num_tokens_from_messages(model.id(), &messages)
            }
        })
        .boxed()
}

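/// Configuration UI for entering, saving, and resetting the OpenAI API key.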
struct ConfigurationView {
    api_key_editor: View<Editor>,
    state: gpui::Model<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Model<State>, cx: &mut ViewContext<Self>) -> Self {
        let api_key_editor = cx.new_view(|cx| {
            let mut editor = Editor::single_line(cx);
            editor.set_placeholder_text("sk-000000000000000000000000000000000000000000000000", cx);
            editor
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            |this, mut cx| async move {
                if let Some(task) = state
                    .update(&mut cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }

                this.update(&mut cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn reset_api_key(&mut self, cx: &mut ViewContext<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", cx));

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.reset_api_key(cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn render_api_key_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            background_color: None,
            underline: None,
            strikethrough: None,
            white_space: WhiteSpace::Normal,
            truncate: None,
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    fn should_render_editor(&self, cx: &mut ViewContext<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

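// Renders one of three states: a loading message while credentials are being
// loaded, the key-entry form with instructions when no key is set, or a
// confirmation row with a reset button (disabled when the key came from the
// environment variable).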
impl Render for ConfigurationView {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const OPENAI_CONSOLE_URL: &str = "https://platform.openai.com/api-keys";
        const INSTRUCTIONS: [&str; 6] = [
            "To use the assistant panel or inline assistant, you need to add your OpenAI API key.",
            " - You can create an API key at: ",
            " - Make sure your OpenAI account has credits.",
            " - A subscription to another service like GitHub Copilot won't work.",
            "",
            "Paste your OpenAI API key below and hit enter to use the assistant:",
        ];

        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new(INSTRUCTIONS[0]))
                .child(h_flex().child(Label::new(INSTRUCTIONS[1])).child(
                    Button::new("openai_console", OPENAI_CONSOLE_URL)
                        .style(ButtonStyle::Subtle)
                        .icon(IconName::ExternalLink)
                        .icon_size(IconSize::XSmall)
                        .icon_color(Color::Muted)
                        .on_click(move |_, cx| cx.open_url(OPENAI_CONSOLE_URL))
                    )
                )
                .children(
                    (2..INSTRUCTIONS.len()).map(|n|
                        Label::new(INSTRUCTIONS[n])).collect::<Vec<_>>())
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .rounded_md()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {OPENAI_API_KEY_VAR} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small),
                )
                .into_any()
        } else {
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {OPENAI_API_KEY_VAR} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(|cx| Tooltip::text(format!("To reset your API key, unset the {OPENAI_API_KEY_VAR} environment variable."), cx))
                        })
                        .on_click(cx.listener(|this, _, cx| this.reset_api_key(cx))),
                )
                .into_any()
        }
    }
}