use anyhow::{anyhow, Result};
use collections::BTreeMap;
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::{
    AnyView, AppContext, AsyncAppContext, FocusHandle, FocusableView, FontStyle, ModelContext,
    Subscription, Task, TextStyle, View, WhiteSpace,
};
use http_client::HttpClient;
use open_ai::stream_completion;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::{future, sync::Arc, time::Duration};
use strum::IntoEnumIterator;
use theme::ThemeSettings;
use ui::{prelude::*, Indicator};
use util::ResultExt;

use crate::{
    settings::AllLanguageModelSettings, LanguageModel, LanguageModelId, LanguageModelName,
    LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
};

const PROVIDER_ID: &str = "openai";
const PROVIDER_NAME: &str = "OpenAI";

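/// Settings for the OpenAI provider: the API endpoint, an optional low-speed
/// timeout, and any additional models configured by the user.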
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiSettings {
    pub api_url: String,
    pub low_speed_timeout: Option<Duration>,
    pub available_models: Vec<AvailableModel>,
    pub needs_setting_migration: bool,
}

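/// A model listed in the user's settings, used to add or override models for
/// this provider.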
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub max_tokens: usize,
}

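/// Exposes OpenAI models via the `LanguageModelProvider` trait and holds the
/// shared authentication state.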
pub struct OpenAiLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Model<State>,
}

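/// Shared provider state: the cached API key and a subscription that notifies
/// observers when the global settings change.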
pub struct State {
    api_key: Option<String>,
    _subscription: Subscription,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    fn reset_api_key(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let delete_credentials = cx.delete_credentials(&settings.api_url);
        cx.spawn(|this, mut cx| async move {
            delete_credentials.await.log_err();
            this.update(&mut cx, |this, cx| {
                this.api_key = None;
                cx.notify();
            })
        })
    }
}

impl OpenAiLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Self {
        let state = cx.new_model(|cx| State {
            api_key: None,
            _subscription: cx.observe_global::<SettingsStore>(|_this: &mut State, cx| {
                cx.notify();
            }),
        });

        Self { http_client, state }
    }
}

impl LanguageModelProviderState for OpenAiLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        // Add base models from open_ai::Model::iter()
        for model in open_ai::Model::iter() {
            if !matches!(model, open_ai::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        // Override with available models from settings
        for model in &AllLanguageModelSettings::get_global(cx)
            .openai
            .available_models
        {
            models.insert(
                model.name.clone(),
                open_ai::Model::Custom {
                    name: model.name.clone(),
                    max_tokens: model.max_tokens,
                },
            );
        }

        models
            .into_values()
            .map(|model| {
                Arc::new(OpenAiLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    state: self.state.clone(),
                    http_client: self.http_client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &AppContext) -> bool {
        self.state.read(cx).is_authenticated()
    }

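    /// Resolves the API key, preferring the OPENAI_API_KEY environment variable
    /// and falling back to the key stored in the system credential store.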
    fn authenticate(&self, cx: &mut AppContext) -> Task<Result<()>> {
        if self.is_authenticated(cx) {
            Task::ready(Ok(()))
        } else {
            let api_url = AllLanguageModelSettings::get_global(cx)
                .openai
                .api_url
                .clone();
            let state = self.state.clone();
            cx.spawn(|mut cx| async move {
                let api_key = if let Ok(api_key) = std::env::var("OPENAI_API_KEY") {
                    api_key
                } else {
                    let (_, api_key) = cx
                        .update(|cx| cx.read_credentials(&api_url))?
                        .await?
                        .ok_or_else(|| anyhow!("credentials not found"))?;
                    String::from_utf8(api_key)?
                };
                state.update(&mut cx, |this, cx| {
                    this.api_key = Some(api_key);
                    cx.notify();
                })
            })
        }
    }

    fn configuration_view(&self, cx: &mut WindowContext) -> (AnyView, Option<FocusHandle>) {
        let view = cx.new_view(|cx| ConfigurationView::new(self.state.clone(), cx));
        let focus_handle = view.focus_handle(cx);
        (view.into(), Some(focus_handle))
    }

    fn reset_credentials(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

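/// A single OpenAI model exposed through the `LanguageModel` trait, with its
/// own request rate limiter.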
pub struct OpenAiLanguageModel {
    id: LanguageModelId,
    model: open_ai::Model,
    state: gpui::Model<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl LanguageModel for OpenAiLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.id())
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        count_open_ai_tokens(request, self.model.clone(), cx)
    }

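    /// Streams completion text from the OpenAI API, gated by this model's
    /// request limiter.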
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
        let request = request.into_open_ai(self.model.id().into());

        let http_client = self.http_client.clone();
        let Ok((api_key, api_url, low_speed_timeout)) = cx.read_model(&self.state, |state, cx| {
            let settings = &AllLanguageModelSettings::get_global(cx).openai;
            (
                state.api_key.clone(),
                settings.api_url.clone(),
                settings.low_speed_timeout,
            )
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?;
            let request = stream_completion(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
                low_speed_timeout,
            );
            let response = request.await?;
            Ok(open_ai::extract_text_from_events(response).boxed())
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

    fn use_any_tool(
        &self,
        _request: LanguageModelRequest,
        _name: String,
        _description: String,
        _schema: serde_json::Value,
        _cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<serde_json::Value>> {
        future::ready(Err(anyhow!("not implemented"))).boxed()
    }
}

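/// Counts the tokens in `request` for the given model on the background
/// executor, using tiktoken-rs.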
pub fn count_open_ai_tokens(
    request: LanguageModelRequest,
    model: open_ai::Model,
    cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
    cx.background_executor()
        .spawn(async move {
            let messages = request
                .messages
                .into_iter()
                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(message.content),
                    name: None,
                    function_call: None,
                })
                .collect::<Vec<_>>();

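            // tiktoken-rs may not recognize custom model names, so fall back to
            // the gpt-4 tokenizer as an approximation.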
            if let open_ai::Model::Custom { .. } = model {
                tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
            } else {
                tiktoken_rs::num_tokens_from_messages(model.id(), &messages)
            }
        })
        .boxed()
}

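/// Configuration UI for entering, saving, and resetting the OpenAI API key.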
struct ConfigurationView {
    api_key_editor: View<Editor>,
    state: gpui::Model<State>,
}

impl ConfigurationView {
    fn new(state: gpui::Model<State>, cx: &mut WindowContext) -> Self {
        Self {
            api_key_editor: cx.new_view(|cx| {
                let mut editor = Editor::single_line(cx);
                editor.set_placeholder_text(
                    "sk-000000000000000000000000000000000000000000000000",
                    cx,
                );
                editor
            }),
            state,
        }
    }

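    /// Writes the entered API key to the system credential store and caches it
    /// in the shared provider state.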
    fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let write_credentials =
            cx.write_credentials(&settings.api_url, "Bearer", api_key.as_bytes());
        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            write_credentials.await?;
            state.update(&mut cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
        .detach_and_log_err(cx);
    }

    fn reset_api_key(&mut self, cx: &mut ViewContext<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", cx));
        self.state.update(cx, |state, cx| {
            state.reset_api_key(cx).detach_and_log_err(cx);
        })
    }

    fn render_api_key_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            background_color: None,
            underline: None,
            strikethrough: None,
            white_space: WhiteSpace::Normal,
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }
}

impl FocusableView for ConfigurationView {
    fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
        self.api_key_editor.read(cx).focus_handle(cx)
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const INSTRUCTIONS: [&str; 6] = [
            "To use the assistant panel or inline assistant, you need to add your OpenAI API key.",
            " - You can create an API key at: platform.openai.com/api-keys",
            " - Make sure your OpenAI account has credits",
            " - Having a subscription for another service like GitHub Copilot won't work.",
            "",
            "Paste your OpenAI API key below and hit enter to use the assistant:",
        ];

        if self.state.read(cx).is_authenticated() {
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_2()
                        .child(Indicator::dot().color(Color::Success))
                        .child(Label::new("API Key configured").size(LabelSize::Small)),
                )
                .child(
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .on_click(cx.listener(|this, _, cx| this.reset_api_key(cx))),
                )
                .into_any()
        } else {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .children(
                    INSTRUCTIONS.map(|instruction| Label::new(instruction).size(LabelSize::Small)),
                )
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .rounded_md()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        "You can also assign the OPENAI_API_KEY environment variable and restart Zed.",
                    )
                    .size(LabelSize::Small),
                )
                .into_any()
        }
    }
}