use anyhow::{anyhow, bail, Result};
use collections::BTreeMap;
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::{
    AnyView, AppContext, AsyncAppContext, FontStyle, ModelContext, Subscription, Task, TextStyle,
    View, WhiteSpace,
};
use http_client::HttpClient;
use open_ai::{
    stream_completion, FunctionDefinition, ResponseStreamEvent, ToolChoice, ToolDefinition,
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::{sync::Arc, time::Duration};
use strum::IntoEnumIterator;
use theme::ThemeSettings;
use ui::{prelude::*, Indicator};
use util::ResultExt;

use crate::{
    settings::AllLanguageModelSettings, LanguageModel, LanguageModelId, LanguageModelName,
    LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
    LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
};

const PROVIDER_ID: &str = "openai";
const PROVIDER_NAME: &str = "OpenAI";

#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiSettings {
    pub api_url: String,
    pub low_speed_timeout: Option<Duration>,
    pub available_models: Vec<AvailableModel>,
    pub needs_setting_migration: bool,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub max_tokens: usize,
}

pub struct OpenAiLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: gpui::Model<State>,
}

pub struct State {
    api_key: Option<String>,
    _subscription: Subscription,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    fn reset_api_key(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let delete_credentials = cx.delete_credentials(&settings.api_url);
        cx.spawn(|this, mut cx| async move {
            delete_credentials.await.log_err();
            this.update(&mut cx, |this, cx| {
                this.api_key = None;
                cx.notify();
            })
        })
    }

    fn set_api_key(&mut self, api_key: String, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let settings = &AllLanguageModelSettings::get_global(cx).openai;
        let write_credentials =
            cx.write_credentials(&settings.api_url, "Bearer", api_key.as_bytes());

        cx.spawn(|this, mut cx| async move {
            write_credentials.await?;
            this.update(&mut cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

    fn authenticate(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_authenticated() {
            Task::ready(Ok(()))
        } else {
            let api_url = AllLanguageModelSettings::get_global(cx)
                .openai
                .api_url
                .clone();
            cx.spawn(|this, mut cx| async move {
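                // Prefer an OPENAI_API_KEY from the environment; otherwise fall back to the
                // credentials stored for the configured API URL.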
                let api_key = if let Ok(api_key) = std::env::var("OPENAI_API_KEY") {
                    api_key
                } else {
                    let (_, api_key) = cx
                        .update(|cx| cx.read_credentials(&api_url))?
                        .await?
                        .ok_or_else(|| anyhow!("credentials not found"))?;
                    String::from_utf8(api_key)?
                };
                this.update(&mut cx, |this, cx| {
                    this.api_key = Some(api_key);
                    cx.notify();
                })
            })
        }
    }
}

impl OpenAiLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Self {
        let state = cx.new_model(|cx| State {
            api_key: None,
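            // Re-notify observers of this state whenever the global settings change.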
            _subscription: cx.observe_global::<SettingsStore>(|_this: &mut State, cx| {
                cx.notify();
            }),
        });

        Self { http_client, state }
    }
}

impl LanguageModelProviderState for OpenAiLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Model<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn icon(&self) -> IconName {
        IconName::AiOpenAi
    }

    fn provided_models(&self, cx: &AppContext) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        // Add base models from open_ai::Model::iter()
        for model in open_ai::Model::iter() {
            if !matches!(model, open_ai::Model::Custom { .. }) {
                models.insert(model.id().to_string(), model);
            }
        }

        // Override with available models from settings
        for model in &AllLanguageModelSettings::get_global(cx)
            .openai
            .available_models
        {
            models.insert(
                model.name.clone(),
                open_ai::Model::Custom {
                    name: model.name.clone(),
                    max_tokens: model.max_tokens,
                },
            );
        }

        models
            .into_values()
            .map(|model| {
                Arc::new(OpenAiLanguageModel {
                    id: LanguageModelId::from(model.id().to_string()),
                    model,
                    state: self.state.clone(),
                    http_client: self.http_client.clone(),
                    request_limiter: RateLimiter::new(4),
                }) as Arc<dyn LanguageModel>
            })
            .collect()
    }

    fn is_authenticated(&self, cx: &AppContext) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(&self, cx: &mut WindowContext) -> AnyView {
        cx.new_view(|cx| ConfigurationView::new(self.state.clone(), cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut AppContext) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

pub struct OpenAiLanguageModel {
    id: LanguageModelId,
    model: open_ai::Model,
    state: gpui::Model<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl OpenAiLanguageModel {
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();
        let Ok((api_key, api_url, low_speed_timeout)) = cx.read_model(&self.state, |state, cx| {
            let settings = &AllLanguageModelSettings::get_global(cx).openai;
            (
                state.api_key.clone(),
                settings.api_url.clone(),
                settings.low_speed_timeout,
            )
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

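        // Funnel the request through the provider's rate limiter before hitting the API.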
        let future = self.request_limiter.stream(async move {
            let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?;
            let request = stream_completion(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
                low_speed_timeout,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for OpenAiLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        LanguageModelProviderId(PROVIDER_ID.into())
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        LanguageModelProviderName(PROVIDER_NAME.into())
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.id())
    }

    fn max_token_count(&self) -> usize {
        self.model.max_token_count()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &AppContext,
    ) -> BoxFuture<'static, Result<usize>> {
        count_open_ai_tokens(request, self.model.clone(), cx)
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<String>>>> {
        let request = request.into_open_ai(self.model.id().into());
        let completions = self.stream_completion(request, cx);
        async move { Ok(open_ai::extract_text_from_events(completions.await?).boxed()) }.boxed()
    }

    fn use_any_tool(
        &self,
        request: LanguageModelRequest,
        tool_name: String,
        tool_description: String,
        schema: serde_json::Value,
        cx: &AsyncAppContext,
    ) -> BoxFuture<'static, Result<serde_json::Value>> {
        let mut request = request.into_open_ai(self.model.id().into());
        let mut function = FunctionDefinition {
            name: tool_name.clone(),
            description: None,
            parameters: None,
        };
        let func = ToolDefinition::Function {
            function: function.clone(),
        };
        request.tool_choice = Some(ToolChoice::Other(func.clone()));
        // Fill in the description and parameters separately, as they aren't needed for the tool_choice field.
        function.description = Some(tool_description);
        function.parameters = Some(schema);
        request.tools = vec![ToolDefinition::Function { function }];
        let response = self.stream_completion(request, cx);
        self.request_limiter
            .run(async move {
                let mut response = response.await?;

                // The call's arguments are streamed in over multiple chunks.
                let mut load_state = None;
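                // `load_state` holds the accumulated argument text and the index of the tool call
                // that matched `tool_name`.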
                while let Some(Ok(part)) = response.next().await {
                    for choice in part.choices {
                        let Some(tool_calls) = choice.delta.tool_calls else {
                            continue;
                        };

                        for call in tool_calls {
                            if let Some(func) = call.function {
                                if func.name.as_deref() == Some(tool_name.as_str()) {
                                    load_state = Some((String::default(), call.index));
                                }
                                if let Some((arguments, (output, index))) =
                                    func.arguments.zip(load_state.as_mut())
                                {
                                    if call.index == *index {
                                        output.push_str(&arguments);
                                    }
                                }
                            }
                        }
                    }
                }
                if let Some((arguments, _)) = load_state {
                    return Ok(serde_json::from_str(&arguments)?);
                } else {
                    bail!("tool not used");
                }
            })
            .boxed()
    }
}

pub fn count_open_ai_tokens(
    request: LanguageModelRequest,
    model: open_ai::Model,
    cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
    cx.background_executor()
        .spawn(async move {
            let messages = request
                .messages
                .into_iter()
                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(message.content),
                    name: None,
                    function_call: None,
                })
                .collect::<Vec<_>>();

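            // Custom model names may not map to a tokenizer known to tiktoken-rs, so
            // approximate their token count with the gpt-4 tokenizer.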
            if let open_ai::Model::Custom { .. } = model {
                tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
            } else {
                tiktoken_rs::num_tokens_from_messages(model.id(), &messages)
            }
        })
        .boxed()
}

struct ConfigurationView {
    api_key_editor: View<Editor>,
    state: gpui::Model<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Model<State>, cx: &mut ViewContext<Self>) -> Self {
        let api_key_editor = cx.new_view(|cx| {
            let mut editor = Editor::single_line(cx);
            editor.set_placeholder_text("sk-000000000000000000000000000000000000000000000000", cx);
            editor
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            |this, mut cx| async move {
                if let Some(task) = state
                    .update(&mut cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log the error here, because "not signed in" is also reported as an error.
                    let _ = task.await;
                }

                this.update(&mut cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx);
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn reset_api_key(&mut self, cx: &mut ViewContext<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", cx));

        let state = self.state.clone();
        cx.spawn(|_, mut cx| async move {
            state
                .update(&mut cx, |state, cx| state.reset_api_key(cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn render_api_key_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let settings = ThemeSettings::get_global(cx);
        let text_style = TextStyle {
            color: cx.theme().colors().text,
            font_family: settings.ui_font.family.clone(),
            font_features: settings.ui_font.features.clone(),
            font_fallbacks: settings.ui_font.fallbacks.clone(),
            font_size: rems(0.875).into(),
            font_weight: settings.ui_font.weight,
            font_style: FontStyle::Normal,
            line_height: relative(1.3),
            background_color: None,
            underline: None,
            strikethrough: None,
            white_space: WhiteSpace::Normal,
        };
        EditorElement::new(
            &self.api_key_editor,
            EditorStyle {
                background: cx.theme().colors().editor_background,
                local_player: cx.theme().players().local(),
                text: text_style,
                ..Default::default()
            },
        )
    }

    fn should_render_editor(&self, cx: &mut ViewContext<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        const INSTRUCTIONS: [&str; 6] = [
            "To use the assistant panel or inline assistant, you need to add your OpenAI API key.",
            " - You can create an API key at: platform.openai.com/api-keys",
            " - Make sure your OpenAI account has credits",
            " - Having a subscription for another service like GitHub Copilot won't work.",
            "",
            "Paste your OpenAI API key below and hit enter to use the assistant:",
        ];

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .children(
                    INSTRUCTIONS.map(|instruction| Label::new(instruction).size(LabelSize::Small)),
                )
                .child(
                    h_flex()
                        .w_full()
                        .my_2()
                        .px_2()
                        .py_1()
                        .bg(cx.theme().colors().editor_background)
                        .rounded_md()
                        .child(self.render_api_key_editor(cx)),
                )
                .child(
                    Label::new(
                        "You can also assign the OPENAI_API_KEY environment variable and restart Zed.",
                    )
                    .size(LabelSize::Small),
                )
                .into_any()
        } else {
            h_flex()
                .size_full()
                .justify_between()
                .child(
                    h_flex()
                        .gap_2()
                        .child(Indicator::dot().color(Color::Success))
                        .child(Label::new("API key configured").size(LabelSize::Small)),
                )
                .child(
                    Button::new("reset-key", "Reset key")
                        .icon(Some(IconName::Trash))
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .on_click(cx.listener(|this, _, cx| this.reset_api_key(cx))),
                )
                .into_any()
        }
    }
}