use anyhow::{anyhow, Result};
use collections::BTreeMap;
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::{
    AnyView, AppContext, AsyncAppContext, FontStyle, ModelContext, Subscription, Task, TextStyle,
    View, WhiteSpace,
};
use http_client::HttpClient;
use open_ai::{
    stream_completion, FunctionDefinition, ResponseStreamEvent, ToolChoice, ToolDefinition,
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::{sync::Arc, time::Duration};
use strum::IntoEnumIterator;
use theme::ThemeSettings;
use ui::{prelude::*, Icon, IconName, Tooltip};
use util::ResultExt;

use crate::LanguageModelCompletionEvent;
use crate::{
    settings::AllLanguageModelSettings, LanguageModel, LanguageModelId, LanguageModelName,
    LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
};

const PROVIDER_ID: &str = "openai";
const PROVIDER_NAME: &str = "OpenAI";

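/// Provider-level settings (API URL, timeout, and extra models) read from the
/// `openai` section of `AllLanguageModelSettings`.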
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiSettings {
    pub api_url: String,
    pub low_speed_timeout: Option<Duration>,
    pub available_models: Vec<AvailableModel>,
    pub needs_setting_migration: bool,
}

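/// A model declared in the user's settings, surfaced as
/// `open_ai::Model::Custom` alongside the built-in models.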
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub max_tokens: usize,
    pub max_output_tokens: Option<u32>,
}

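/// The OpenAI implementation of `LanguageModelProvider`, holding the HTTP
/// client and the shared authentication state.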
pub struct OpenAiLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Model<State>,
}

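/// Holds the API key (and whether it came from the environment), shared by the
/// provider, its models, and the configuration view.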
pub struct State {
    api_key: Option<String>,
    api_key_from_env: bool,
    _subscription: Subscription,
}

const OPENAI_API_KEY_VAR: &str = "OPENAI_API_KEY";

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    fn reset_api_key(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let delete_credentials = cx.delete_credentials(&settings.api_url);
        cx.spawn(|this, mut cx| async move {
            delete_credentials.await.log_err();
            this.update(&mut cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

    fn set_api_key(&mut self, api_key: String, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let write_credentials =
            cx.write_credentials(&settings.api_url, "Bearer", api_key.as_bytes());

        cx.spawn(|this, mut cx| async move {
            write_credentials.await?;
            this.update(&mut cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

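    /// Resolve the API key, preferring the `OPENAI_API_KEY` environment
    /// variable and falling back to the system credential store.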
    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_authenticated() {
            Task::ready(Ok(()))
        } else {
            let api_url = AllLanguageModelSettings::get_global(cx)
                .openai
                .api_url
                .clone();
            cx.spawn(|this, mut cx| async move {
                let (api_key, from_env) = if let Ok(api_key) = std::env::var(OPENAI_API_KEY_VAR) {
                    (api_key, true)
                } else {
                    let (_, api_key) = cx
                        .update(|cx| cx.read_credentials(&api_url))?
                        .await?
                        .ok_or_else(|| anyhow!("credentials not found"))?;
                    (String::from_utf8(api_key)?, false)
                };
                this.update(&mut cx, |this, cx| {
                    this.api_key = Some(api_key);
                    this.api_key_from_env = from_env;
                    cx.notify();
                })
            })
        }
    }
}

impl OpenAiLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Self {
        let state = cx.new_model(|cx| State {
            api_key: None,
            api_key_from_env: false,
            _subscription: cx.observe_global::<SettingsStore>(|_this: &mut State, cx| {
                cx.notify();
            }),
        });

        Self { http_client, state }
    }
}

impl LanguageModelProviderState for OpenAiLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiOpenAi
    }

    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        // Add base models from open_ai::Model::iter().
        for model in open_ai::Model::iter() {
            if !matches!(model, open_ai::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        // Override with available models from settings.
        for model in &AllLanguageModelSettings::get_global(cx)
            .openai
            .available_models
        {
            models.insert(
                model.name.clone(),
                open_ai::Model::Custom {
                    name: model.name.clone(),
                    max_tokens: model.max_tokens,
                    max_output_tokens: model.max_output_tokens,
                },
            );
        }

        models
            .into_values()
            .map(|model| {
                Arc::new(OpenAiLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    state: self.state.clone(),
                    http_client: self.http_client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &AppContext) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
        cx.new_view(|cx| ConfigurationView::new(self.state.clone(), cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

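/// One OpenAI model exposed to the rest of the app; completions go through
/// `request_limiter` and use the shared `State` for credentials.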
pub struct OpenAiLanguageModel {
    id: LanguageModelId,
    model: open_ai::Model,
    state: gpui::Model<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl OpenAiLanguageModel {
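    /// Read the API key and endpoint from the shared state, then stream a
    /// completion bounded by the request limiter.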
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();
        let Ok((api_key, api_url, low_speed_timeout)) = cx.read_model(&self.state, |state, cx| {
            let settings = &AllLanguageModelSettings::get_global(cx).openai;
            (
                state.api_key.clone(),
                settings.api_url.clone(),
                settings.low_speed_timeout,
            )
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?;
            let request = stream_completion(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
                low_speed_timeout,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for OpenAiLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.id())
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u32> {
        self.model.max_output_tokens()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        count_open_ai_tokens(request, self.model.clone(), cx)
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncAppContext,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<LanguageModelCompletionEvent>>>,
    > {
        let request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
        let completions = self.stream_completion(request, cx);
        async move {
            Ok(open_ai::extract_text_from_events(completions.await?)
                .map(|result| result.map(LanguageModelCompletionEvent::Text))
                .boxed())
        }
        .boxed()
    }

    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        schema: serde_json::Value,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
        let mut request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
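        // Force the model to call the single tool we define below.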
        request.tool_choice = Some(ToolChoice::Other(ToolDefinition::Function {
            function: FunctionDefinition {
                name: tool_name.clone(),
                description: None,
                parameters: None,
            },
        }));
        request.tools = vec![ToolDefinition::Function {
            function: FunctionDefinition {
                name: tool_name.clone(),
                description: Some(tool_description),
                parameters: Some(schema),
            },
        }];

        let response = self.stream_completion(request, cx);
        self.request_limiter
            .run(async move {
                let response = response.await?;
                Ok(
                    open_ai::extract_tool_args_from_events(tool_name, Box::pin(response))
                        .await?
                        .boxed(),
                )
            })
            .boxed()
    }
}

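/// Estimate the token count of a request on the background executor using
/// `tiktoken_rs`.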
pub fn count_open_ai_tokens(
    request: LanguageModelRequest,
    model: open_ai::Model,
    cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
    cx.background_executor()
        .spawn(async move {
            let messages = request
                .messages
                .into_iter()
                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(message.string_contents()),
                    name: None,
                    function_call: None,
                })
                .collect::<Vec<_>>();

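            // tiktoken_rs has no tokenizer for custom model names, so
            // approximate them with the gpt-4 tokenizer.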
            if let open_ai::Model::Custom { .. } = model {
                tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
            } else {
                tiktoken_rs::num_tokens_from_messages(model.id(), &messages)
            }
        })
        .boxed()
}

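/// The settings UI for entering, saving, and resetting the OpenAI API key.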
struct ConfigurationView {
    api_key_editor: View<Editor>,
    state: gpui::Model<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Model<State>, cx: &mut ViewContext<Self>) -> Self {
        let api_key_editor = cx.new_view(|cx| {
            let mut editor = Editor::single_line(cx);
            editor.set_placeholder_text("sk-000000000000000000000000000000000000000000000000", cx);
            editor
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            |this, mut cx| async move {
                if let Some(task) = state
                    .update(&mut cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }

                this.update(&mut cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn reset_api_key(&mut self, cx: &mut ViewContext<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", cx));

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.reset_api_key(cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn render_api_key_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            background_color: None,
            underline: None,
            strikethrough: None,
            white_space: WhiteSpace::Normal,
            truncate: None,
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    fn should_render_editor(&self, cx: &mut ViewContext<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const OPENAI_CONSOLE_URL: &str = "https://platform.openai.com/api-keys";
        const INSTRUCTIONS: [&str; 6] = [
            "To use the assistant panel or inline assistant, you need to add your OpenAI API key.",
            " - You can create an API key at: ",
            " - Make sure your OpenAI account has credits",
            " - Having a subscription for another service like GitHub Copilot won't work.",
            "",
            "Paste your OpenAI API key below and hit enter to use the assistant:",
        ];

        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new(INSTRUCTIONS[0]))
                .child(
                    h_flex().child(Label::new(INSTRUCTIONS[1])).child(
                        Button::new("openai_console", OPENAI_CONSOLE_URL)
                            .style(ButtonStyle::Subtle)
                            .icon(IconName::ExternalLink)
                            .icon_size(IconSize::XSmall)
                            .icon_color(Color::Muted)
                            .on_click(move |_, cx| cx.open_url(OPENAI_CONSOLE_URL)),
                    ),
                )
                .children(
                    (2..INSTRUCTIONS.len())
                        .map(|n| Label::new(INSTRUCTIONS[n]))
                        .collect::<Vec<_>>(),
                )
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .rounded_md()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {OPENAI_API_KEY_VAR} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small),
                )
                .into_any()
        } else {
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {OPENAI_API_KEY_VAR} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(|cx| Tooltip::text(format!("To reset your API key, unset the {OPENAI_API_KEY_VAR} environment variable."), cx))
                        })
                        .on_click(cx.listener(|this, _, cx| this.reset_api_key(cx))),
                )
                .into_any()
        }
    }
}