1use anyhow::{anyhow, Result};
2use collections::BTreeMap;
3use editor::{Editor, EditorElement, EditorStyle};
4use futures::{future::BoxFuture, FutureExt, StreamExt};
5use gpui::{
6 AnyView, AppContext, AsyncAppContext, FontStyle, ModelContext, Subscription, Task, TextStyle,
7 View, WhiteSpace,
8};
9use http_client::HttpClient;
10use language_model::{
11 LanguageModel, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName,
12 LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
13 LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
14};
15use open_ai::{
16 stream_completion, FunctionDefinition, ResponseStreamEvent, ToolChoice, ToolDefinition,
17};
18use schemars::JsonSchema;
19use serde::{Deserialize, Serialize};
20use settings::{Settings, SettingsStore};
21use std::sync::Arc;
22use strum::IntoEnumIterator;
23use theme::ThemeSettings;
24use ui::{prelude::*, Icon, IconName, Tooltip};
25use util::ResultExt;
26
27use crate::AllLanguageModelSettings;
28
// Stable identifier used to register this provider with the model registry.
const PROVIDER_ID: &str = "openai";
// Human-readable provider name surfaced in the UI.
const PROVIDER_NAME: &str = "OpenAI";
31
/// User-configurable settings for the OpenAI provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiSettings {
    // Base URL of the OpenAI-compatible API endpoint; also used as the
    // credential-store key (see `State::set_api_key`/`reset_api_key`).
    pub api_url: String,
    // Extra models declared in settings; merged over the built-in model list
    // in `provided_models`, overriding built-ins with the same name.
    pub available_models: Vec<AvailableModel>,
    // NOTE(review): presumably set by a settings-migration pass elsewhere;
    // not read anywhere in this file.
    pub needs_setting_migration: bool,
}
38
/// A model entry as declared in user settings (deserialized from JSON).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    // Model identifier sent to the API (e.g. a custom deployment name).
    pub name: String,
    // Optional label shown in the UI instead of `name`.
    pub display_name: Option<String>,
    // Context-window size in tokens.
    pub max_tokens: usize,
    // Optional cap on output tokens — forwarded to `open_ai::Model::Custom`;
    // exact API semantics live in the `open_ai` crate.
    pub max_output_tokens: Option<u32>,
    // Optional cap on completion tokens — likewise forwarded verbatim.
    pub max_completion_tokens: Option<u32>,
}
47
/// Language-model provider backed by the OpenAI HTTP API.
pub struct OpenAiLanguageModelProvider {
    // Shared HTTP client used for all completion requests.
    http_client: Arc<dyn HttpClient>,
    // Shared, observable authentication state (see `State`).
    state: gpui::Model<State>,
}
52
/// Observable authentication state shared by the provider and its UI.
pub struct State {
    // API key currently in use, if any.
    api_key: Option<String>,
    // True when the key came from the environment variable rather than the
    // credential store; such keys cannot be reset from the UI (the reset
    // button is disabled in `ConfigurationView`).
    api_key_from_env: bool,
    // Keeps the settings-store observer alive for the lifetime of the state.
    _subscription: Subscription,
}

// Environment variable consulted before falling back to stored credentials.
const OPENAI_API_KEY_VAR: &str = "OPENAI_API_KEY";
60
61impl State {
62 fn is_authenticated(&self) -> bool {
63 self.api_key.is_some()
64 }
65
66 fn reset_api_key(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
67 let settings = &AllLanguageModelSettings::get_global(cx).openai;
68 let delete_credentials = cx.delete_credentials(&settings.api_url);
69 cx.spawn(|this, mut cx| async move {
70 delete_credentials.await.log_err();
71 this.update(&mut cx, |this, cx| {
72 this.api_key = None;
73 this.api_key_from_env = false;
74 cx.notify();
75 })
76 })
77 }
78
79 fn set_api_key(&mut self, api_key: String, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
80 let settings = &AllLanguageModelSettings::get_global(cx).openai;
81 let write_credentials =
82 cx.write_credentials(&settings.api_url, "Bearer", api_key.as_bytes());
83
84 cx.spawn(|this, mut cx| async move {
85 write_credentials.await?;
86 this.update(&mut cx, |this, cx| {
87 this.api_key = Some(api_key);
88 cx.notify();
89 })
90 })
91 }
92
93 fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
94 if self.is_authenticated() {
95 Task::ready(Ok(()))
96 } else {
97 let api_url = AllLanguageModelSettings::get_global(cx)
98 .openai
99 .api_url
100 .clone();
101 cx.spawn(|this, mut cx| async move {
102 let (api_key, from_env) = if let Ok(api_key) = std::env::var(OPENAI_API_KEY_VAR) {
103 (api_key, true)
104 } else {
105 let (_, api_key) = cx
106 .update(|cx| cx.read_credentials(&api_url))?
107 .await?
108 .ok_or_else(|| anyhow!("credentials not found"))?;
109 (String::from_utf8(api_key)?, false)
110 };
111 this.update(&mut cx, |this, cx| {
112 this.api_key = Some(api_key);
113 this.api_key_from_env = from_env;
114 cx.notify();
115 })
116 })
117 }
118 }
119}
120
121impl OpenAiLanguageModelProvider {
122 pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Self {
123 let state = cx.new_model(|cx| State {
124 api_key: None,
125 api_key_from_env: false,
126 _subscription: cx.observe_global::<SettingsStore>(|_this: &mut State, cx| {
127 cx.notify();
128 }),
129 });
130
131 Self { http_client, state }
132 }
133}
134
impl LanguageModelProviderState for OpenAiLanguageModelProvider {
    type ObservableEntity = State;

    // Exposes the shared auth state so callers can observe changes to it.
    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
142
143impl LanguageModelProvider for OpenAiLanguageModelProvider {
144 fn id(&self) -> LanguageModelProviderId {
145 LanguageModelProviderId(PROVIDER_ID.into())
146 }
147
148 fn name(&self) -> LanguageModelProviderName {
149 LanguageModelProviderName(PROVIDER_NAME.into())
150 }
151
152 fn icon(&self) -> IconName {
153 IconName::AiOpenAi
154 }
155
156 fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
157 let mut models = BTreeMap::default();
158
159 // Add base models from open_ai::Model::iter()
160 for model in open_ai::Model::iter() {
161 if !matches!(model, open_ai::Model::Custom { .. }) {
162 models.insert(model.id().to_string(), model);
163 }
164 }
165
166 // Override with available models from settings
167 for model in &AllLanguageModelSettings::get_global(cx)
168 .openai
169 .available_models
170 {
171 models.insert(
172 model.name.clone(),
173 open_ai::Model::Custom {
174 name: model.name.clone(),
175 display_name: model.display_name.clone(),
176 max_tokens: model.max_tokens,
177 max_output_tokens: model.max_output_tokens,
178 max_completion_tokens: model.max_completion_tokens,
179 },
180 );
181 }
182
183 models
184 .into_values()
185 .map(|model| {
186 Arc::new(OpenAiLanguageModel {
187 id: LanguageModelId::from(model.id().to_string()),
188 model,
189 state: self.state.clone(),
190 http_client: self.http_client.clone(),
191 request_limiter: RateLimiter::new(4),
192 }) as Arc<dyn LanguageModel>
193 })
194 .collect()
195 }
196
197 fn is_authenticated(&self, cx: &AppContext) -> bool {
198 self.state.read(cx).is_authenticated()
199 }
200
201 fn authenticate(&self, cx: &mut AppContext) -> Task<Result<()>> {
202 self.state.update(cx, |state, cx| state.authenticate(cx))
203 }
204
205 fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
206 cx.new_view(|cx| ConfigurationView::new(self.state.clone(), cx))
207 .into()
208 }
209
210 fn reset_credentials(&self, cx: &mut AppContext) -> Task<Result<()>> {
211 self.state.update(cx, |state, cx| state.reset_api_key(cx))
212 }
213}
214
/// A single OpenAI model instance exposed through the `LanguageModel` trait.
pub struct OpenAiLanguageModel {
    // Registry id, derived from the underlying model's id.
    id: LanguageModelId,
    // The concrete model (built-in variant or settings-defined Custom).
    model: open_ai::Model,
    // Shared auth state; read for the API key on every request.
    state: gpui::Model<State>,
    // HTTP client used for streaming completions.
    http_client: Arc<dyn HttpClient>,
    // Bounds in-flight requests (constructed with a limit of 4).
    request_limiter: RateLimiter,
}
222
223impl OpenAiLanguageModel {
224 fn stream_completion(
225 &self,
226 request: open_ai::Request,
227 cx: &AsyncAppContext,
228 ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
229 {
230 let http_client = self.http_client.clone();
231 let Ok((api_key, api_url)) = cx.read_model(&self.state, |state, cx| {
232 let settings = &AllLanguageModelSettings::get_global(cx).openai;
233 (state.api_key.clone(), settings.api_url.clone())
234 }) else {
235 return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
236 };
237
238 let future = self.request_limiter.stream(async move {
239 let api_key = api_key.ok_or_else(|| anyhow!("Missing OpenAI API Key"))?;
240 let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
241 let response = request.await?;
242 Ok(response)
243 });
244
245 async move { Ok(future.await?.boxed()) }.boxed()
246 }
247}
248
249impl LanguageModel for OpenAiLanguageModel {
250 fn id(&self) -> LanguageModelId {
251 self.id.clone()
252 }
253
254 fn name(&self) -> LanguageModelName {
255 LanguageModelName::from(self.model.display_name().to_string())
256 }
257
258 fn provider_id(&self) -> LanguageModelProviderId {
259 LanguageModelProviderId(PROVIDER_ID.into())
260 }
261
262 fn provider_name(&self) -> LanguageModelProviderName {
263 LanguageModelProviderName(PROVIDER_NAME.into())
264 }
265
266 fn telemetry_id(&self) -> String {
267 format!("openai/{}", self.model.id())
268 }
269
270 fn max_token_count(&self) -> usize {
271 self.model.max_token_count()
272 }
273
274 fn max_output_tokens(&self) -> Option<u32> {
275 self.model.max_output_tokens()
276 }
277
278 fn count_tokens(
279 &self,
280 request: LanguageModelRequest,
281 cx: &AppContext,
282 ) -> BoxFuture<'static, Result<usize>> {
283 count_open_ai_tokens(request, self.model.clone(), cx)
284 }
285
286 fn stream_completion(
287 &self,
288 request: LanguageModelRequest,
289 cx: &AsyncAppContext,
290 ) -> BoxFuture<
291 'static,
292 Result<futures::stream::BoxStream<'static, Result<LanguageModelCompletionEvent>>>,
293 > {
294 let request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
295 let completions = self.stream_completion(request, cx);
296 async move {
297 Ok(open_ai::extract_text_from_events(completions.await?)
298 .map(|result| result.map(LanguageModelCompletionEvent::Text))
299 .boxed())
300 }
301 .boxed()
302 }
303
304 fn use_any_tool(
305 &self,
306 request: LanguageModelRequest,
307 tool_name: String,
308 tool_description: String,
309 schema: serde_json::Value,
310 cx: &AsyncAppContext,
311 ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
312 let mut request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
313 request.tool_choice = Some(ToolChoice::Other(ToolDefinition::Function {
314 function: FunctionDefinition {
315 name: tool_name.clone(),
316 description: None,
317 parameters: None,
318 },
319 }));
320 request.tools = vec![ToolDefinition::Function {
321 function: FunctionDefinition {
322 name: tool_name.clone(),
323 description: Some(tool_description),
324 parameters: Some(schema),
325 },
326 }];
327
328 let response = self.stream_completion(request, cx);
329 self.request_limiter
330 .run(async move {
331 let response = response.await?;
332 Ok(
333 open_ai::extract_tool_args_from_events(tool_name, Box::pin(response))
334 .await?
335 .boxed(),
336 )
337 })
338 .boxed()
339 }
340}
341
342pub fn count_open_ai_tokens(
343 request: LanguageModelRequest,
344 model: open_ai::Model,
345 cx: &AppContext,
346) -> BoxFuture<'static, Result<usize>> {
347 cx.background_executor()
348 .spawn(async move {
349 let messages = request
350 .messages
351 .into_iter()
352 .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
353 role: match message.role {
354 Role::User => "user".into(),
355 Role::Assistant => "assistant".into(),
356 Role::System => "system".into(),
357 },
358 content: Some(message.string_contents()),
359 name: None,
360 function_call: None,
361 })
362 .collect::<Vec<_>>();
363
364 match model {
365 open_ai::Model::Custom { .. }
366 | open_ai::Model::O1Mini
367 | open_ai::Model::O1Preview => {
368 tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
369 }
370 _ => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
371 }
372 })
373 .boxed()
374}
375
/// Settings-panel view for entering, saving, and resetting the API key.
struct ConfigurationView {
    // Single-line editor the user types the key into.
    api_key_editor: View<Editor>,
    // Shared auth state this view reads and mutates.
    state: gpui::Model<State>,
    // Some while stored credentials are being loaded; render shows a
    // "Loading credentials..." placeholder until it completes.
    load_credentials_task: Option<Task<()>>,
}
381
382impl ConfigurationView {
383 fn new(state: gpui::Model<State>, cx: &mut ViewContext<Self>) -> Self {
384 let api_key_editor = cx.new_view(|cx| {
385 let mut editor = Editor::single_line(cx);
386 editor.set_placeholder_text("sk-000000000000000000000000000000000000000000000000", cx);
387 editor
388 });
389
390 cx.observe(&state, |_, _, cx| {
391 cx.notify();
392 })
393 .detach();
394
395 let load_credentials_task = Some(cx.spawn({
396 let state = state.clone();
397 |this, mut cx| async move {
398 if let Some(task) = state
399 .update(&mut cx, |state, cx| state.authenticate(cx))
400 .log_err()
401 {
402 // We don't log an error, because "not signed in" is also an error.
403 let _ = task.await;
404 }
405
406 this.update(&mut cx, |this, cx| {
407 this.load_credentials_task = None;
408 cx.notify();
409 })
410 .log_err();
411 }
412 }));
413
414 Self {
415 api_key_editor,
416 state,
417 load_credentials_task,
418 }
419 }
420
421 fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
422 let api_key = self.api_key_editor.read(cx).text(cx);
423 if api_key.is_empty() {
424 return;
425 }
426
427 let state = self.state.clone();
428 cx.spawn(|_, mut cx| async move {
429 state
430 .update(&mut cx, |state, cx| state.set_api_key(api_key, cx))?
431 .await
432 })
433 .detach_and_log_err(cx);
434
435 cx.notify();
436 }
437
438 fn reset_api_key(&mut self, cx: &mut ViewContext<Self>) {
439 self.api_key_editor
440 .update(cx, |editor, cx| editor.set_text("", cx));
441
442 let state = self.state.clone();
443 cx.spawn(|_, mut cx| async move {
444 state
445 .update(&mut cx, |state, cx| state.reset_api_key(cx))?
446 .await
447 })
448 .detach_and_log_err(cx);
449
450 cx.notify();
451 }
452
453 fn render_api_key_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
454 let settings = ThemeSettings::get_global(cx);
455 let text_style = TextStyle {
456 color: cx.theme().colors().text,
457 font_family: settings.ui_font.family.clone(),
458 font_features: settings.ui_font.features.clone(),
459 font_fallbacks: settings.ui_font.fallbacks.clone(),
460 font_size: rems(0.875).into(),
461 font_weight: settings.ui_font.weight,
462 font_style: FontStyle::Normal,
463 line_height: relative(1.3),
464 background_color: None,
465 underline: None,
466 strikethrough: None,
467 white_space: WhiteSpace::Normal,
468 truncate: None,
469 };
470 EditorElement::new(
471 &self.api_key_editor,
472 EditorStyle {
473 background: cx.theme().colors().editor_background,
474 local_player: cx.theme().players().local(),
475 text: text_style,
476 ..Default::default()
477 },
478 )
479 }
480
481 fn should_render_editor(&self, cx: &mut ViewContext<Self>) -> bool {
482 !self.state.read(cx).is_authenticated()
483 }
484}
485
impl Render for ConfigurationView {
    // Three states: a loading placeholder while credentials are read, the
    // key-entry form while unauthenticated, and a confirmation row (with a
    // reset button) once a key is set.
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const OPENAI_CONSOLE_URL: &str = "https://platform.openai.com/api-keys";
        const INSTRUCTIONS: [&str; 4] = [
            "To use Zed's assistant with OpenAI, you need to add an API key. Follow these steps:",
            " - Create one by visiting:",
            " - Ensure your OpenAI account has credits",
            " - Paste your API key below and hit enter to start using the assistant",
        ];

        // Keys from the environment can't be reset from the UI, so the reset
        // button below is disabled in that case.
        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                // menu::Confirm (enter) saves the typed key.
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new(INSTRUCTIONS[0]))
                // Second instruction line pairs with a clickable console link.
                .child(h_flex().child(Label::new(INSTRUCTIONS[1])).child(
                    Button::new("openai_console", OPENAI_CONSOLE_URL)
                        .style(ButtonStyle::Subtle)
                        .icon(IconName::ExternalLink)
                        .icon_size(IconSize::XSmall)
                        .icon_color(Color::Muted)
                        .on_click(move |_, cx| cx.open_url(OPENAI_CONSOLE_URL))
                    )
                )
                // Remaining instruction lines rendered as plain labels.
                .children(
                    (2..INSTRUCTIONS.len()).map(|n|
                        Label::new(INSTRUCTIONS[n])).collect::<Vec<_>>())
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .rounded_md()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {OPENAI_API_KEY_VAR} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small),
                )
                .child(
                    Label::new(
                        "Note that having a subscription for another service like GitHub Copilot won't work.".to_string(),
                    )
                    .size(LabelSize::Small),
                )
                .into_any()
        } else {
            // Authenticated: show where the key came from plus a reset button.
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {OPENAI_API_KEY_VAR} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(|cx| Tooltip::text(format!("To reset your API key, unset the {OPENAI_API_KEY_VAR} environment variable."), cx))
                        })
                        .on_click(cx.listener(|this, _, cx| this.reset_api_key(cx))),
                )
                .into_any()
        }
    }
}