use anyhow::{anyhow, Result};
use collections::BTreeMap;
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::{
    AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
};
use http_client::HttpClient;
use language_model::{
    LanguageModel, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName,
    LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
};
use open_ai::{
    stream_completion, FunctionDefinition, ResponseStreamEvent, ToolChoice, ToolDefinition,
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::sync::Arc;
use strum::IntoEnumIterator;
use theme::ThemeSettings;
use ui::{prelude::*, Icon, IconName, Tooltip};
use util::ResultExt;

use crate::AllLanguageModelSettings;

const PROVIDER_ID: &str = "openai";
const PROVIDER_NAME: &str = "OpenAI";

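/// Provider-level settings for OpenAI: the endpoint requests are sent to and any
/// extra models the user has declared. A hedged sketch of the corresponding
/// settings JSON (the `language_models.openai` path is an assumption; the field
/// names come from this struct and [`AvailableModel`]):
///
/// ```json
/// {
///     "language_models": {
///         "openai": {
///             "api_url": "https://api.openai.com/v1",
///             "available_models": [
///                 { "name": "my-proxy-model", "display_name": "My Proxy", "max_tokens": 128000 }
///             ]
///         }
///     }
/// }
/// ```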
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiSettings {
    pub api_url: String,
    pub available_models: Vec<AvailableModel>,
    pub needs_setting_migration: bool,
}

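/// A user-declared model from the settings above. `name` is the model id sent to
/// the API, `display_name` is what the UI shows, and the token fields bound the
/// context window and completion length used when building requests.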
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: usize,
    pub max_output_tokens: Option<u32>,
    pub max_completion_tokens: Option<u32>,
}

pub struct OpenAiLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}

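/// Authentication state shared by the provider and its configuration view: the
/// API key (if any), whether it came from the environment rather than the
/// credential store, and a subscription that triggers re-renders when settings
/// change.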
pub struct State {
    api_key: Option<String>,
    api_key_from_env: bool,
    _subscription: Subscription,
}

const OPENAI_API_KEY_VAR: &str = "OPENAI_API_KEY";

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let delete_credentials = cx.delete_credentials(&settings.api_url);
        cx.spawn(|this, mut cx| async move {
            delete_credentials.await.log_err();
            this.update(&mut cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

    fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let write_credentials =
            cx.write_credentials(&settings.api_url, "Bearer", api_key.as_bytes());

        cx.spawn(|this, mut cx| async move {
            write_credentials.await?;
            this.update(&mut cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

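    // Resolution order when no key is loaded yet: the OPENAI_API_KEY environment
    // variable wins; otherwise we fall back to credentials previously stored for
    // this API URL via `write_credentials` above.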
    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
        if self.is_authenticated() {
            Task::ready(Ok(()))
        } else {
            let api_url = AllLanguageModelSettings::get_global(cx)
                .openai
                .api_url
                .clone();
            cx.spawn(|this, mut cx| async move {
                let (api_key, from_env) = if let Ok(api_key) = std::env::var(OPENAI_API_KEY_VAR) {
                    (api_key, true)
                } else {
                    let (_, api_key) = cx
                        .update(|cx| cx.read_credentials(&api_url))?
                        .await?
                        .ok_or_else(|| anyhow!("credentials not found"))?;
                    (String::from_utf8(api_key)?, false)
                };
                this.update(&mut cx, |this, cx| {
                    this.api_key = Some(api_key);
                    this.api_key_from_env = from_env;
                    cx.notify();
                })
            })
        }
    }
}

impl OpenAiLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        let state = cx.new(|cx| State {
            api_key: None,
            api_key_from_env: false,
            _subscription: cx.observe_global::<SettingsStore>(|_this: &mut State, cx| {
                cx.notify();
            }),
        });

        Self { http_client, state }
    }
}

impl LanguageModelProviderState for OpenAiLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiOpenAi
    }

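    // Built-in models and user-declared models are merged in a BTreeMap keyed by
    // model id/name, so a settings entry whose `name` matches a built-in model id
    // replaces it, and the resulting list is sorted by that key.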
    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        // Add base models from open_ai::Model::iter()
        for model in open_ai::Model::iter() {
            if !matches!(model, open_ai::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        // Override with available models from settings
        for model in &AllLanguageModelSettings::get_global(cx)
            .openai
            .available_models
        {
            models.insert(
                model.name.clone(),
                open_ai::Model::Custom {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                    max_output_tokens: model.max_output_tokens,
                    max_completion_tokens: model.max_completion_tokens,
                },
            );
        }

        models
            .into_values()
            .map(|model| {
                Arc::new(OpenAiLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    state: self.state.clone(),
                    http_client: self.http_client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

pub struct OpenAiLanguageModel {
    id: LanguageModelId,
    model: open_ai::Model,
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl OpenAiLanguageModel {
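    // Reads the API key and endpoint out of the shared `State`, then issues the
    // request through `request_limiter` (created with a limit of 4 above) so a
    // single model instance doesn't run too many completions concurrently.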
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();
        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| {
            let settings = &AllLanguageModelSettings::get_global(cx).openai;
            (state.api_key.clone(), settings.api_url.clone())
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let api_key = api_key.ok_or_else(|| anyhow!("Missing OpenAI API Key"))?;
            let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for OpenAiLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.id())
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u32> {
        self.model.max_output_tokens()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<usize>> {
        count_open_ai_tokens(request, self.model.clone(), cx)
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<LanguageModelCompletionEvent>>>,
    > {
        let request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
        let completions = self.stream_completion(request, cx);
        async move {
            Ok(open_ai::extract_text_from_events(completions.await?)
                .map(|result| result.map(LanguageModelCompletionEvent::Text))
                .boxed())
        }
        .boxed()
    }

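    // Forces a tool call: the request registers exactly one tool and sets
    // `tool_choice` to that same function, then the streamed tool-call arguments
    // are extracted from the response events as a stream of strings.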
    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        schema: serde_json::Value,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
        let mut request = request.into_open_ai(self.model.id().into(), self.max_output_tokens());
        request.tool_choice = Some(ToolChoice::Other(ToolDefinition::Function {
            function: FunctionDefinition {
                name: tool_name.clone(),
                description: None,
                parameters: None,
            },
        }));
        request.tools = vec![ToolDefinition::Function {
            function: FunctionDefinition {
                name: tool_name.clone(),
                description: Some(tool_description),
                parameters: Some(schema),
            },
        }];

        let response = self.stream_completion(request, cx);
        self.request_limiter
            .run(async move {
                let response = response.await?;
                Ok(
                    open_ai::extract_tool_args_from_events(tool_name, Box::pin(response))
                        .await?
                        .boxed(),
                )
            })
            .boxed()
    }
}

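/// Counts the request's tokens with tiktoken on a background thread. Custom
/// models and the o1/o3 family are counted with the "gpt-4" tokenizer as an
/// approximation, since `num_tokens_from_messages` needs a model id it
/// recognizes.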
pub fn count_open_ai_tokens(
    request: LanguageModelRequest,
    model: open_ai::Model,
    cx: &App,
) -> BoxFuture<'static, Result<usize>> {
    cx.background_executor()
        .spawn(async move {
            let messages = request
                .messages
                .into_iter()
                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(message.string_contents()),
                    name: None,
                    function_call: None,
                })
                .collect::<Vec<_>>();

            match model {
                open_ai::Model::Custom { .. }
                | open_ai::Model::O1Mini
                | open_ai::Model::O1
                | open_ai::Model::O3Mini => {
                    tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
                }
                _ => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
            }
        })
        .boxed()
}

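/// Settings UI for the provider: a single-line editor for entering an API key,
/// plus the shared `State` used to save, load, and reset it.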
struct ConfigurationView {
    api_key_editor: Entity<Editor>,
    state: gpui::Entity<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor = cx.new(|cx| {
            let mut editor = Editor::single_line(window, cx);
            editor.set_placeholder_text("sk-000000000000000000000000000000000000000000000000", cx);
            editor
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            |this, mut cx| async move {
                if let Some(task) = state
                    .update(&mut cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }

                this.update(&mut cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn_in(window, |_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, |_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.reset_api_key(cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn render_api_key_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            white_space: WhiteSpace::Normal,
            ..Default::default()
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        const OPENAI_CONSOLE_URL: &str = "https://platform.openai.com/api-keys";
        const INSTRUCTIONS: [&str; 4] = [
            "To use Zed's assistant with OpenAI, you need to add an API key. Follow these steps:",
            " - Create one by visiting:",
            " - Ensure your OpenAI account has credits",
            " - Paste your API key below and hit enter to start using the assistant",
        ];

        let env_var_set = self.state.read(cx).api_key_from_env;

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new(INSTRUCTIONS[0]))
                .child(h_flex().child(Label::new(INSTRUCTIONS[1])).child(
                    Button::new("openai_console", OPENAI_CONSOLE_URL)
                        .style(ButtonStyle::Subtle)
                        .icon(IconName::ArrowUpRight)
                        .icon_size(IconSize::XSmall)
                        .icon_color(Color::Muted)
                        .on_click(move |_, _, cx| cx.open_url(OPENAI_CONSOLE_URL)),
                ))
                .children(
                    (2..INSTRUCTIONS.len())
                        .map(|n| Label::new(INSTRUCTIONS[n]))
                        .collect::<Vec<_>>(),
                )
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .border_1()
                        .border_color(cx.theme().colors().border_variant)
                        .rounded_md()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {OPENAI_API_KEY_VAR} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small),
                )
                .child(
                    Label::new(
                        "Note that having a subscription for another service like GitHub Copilot won't work.".to_string(),
                    )
                    .size(LabelSize::Small),
                )
                .into_any()
        } else {
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {OPENAI_API_KEY_VAR} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .disabled(env_var_set)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {OPENAI_API_KEY_VAR} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        }
    }
}