use anyhow::{Context as _, Result, anyhow};
use credentials_provider::CredentialsProvider;

use convert_case::{Case, Casing};
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
use http_client::HttpClient;
use language_model::{
    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
    LanguageModelToolChoice, RateLimiter,
};
use menu;
use open_ai::{ResponseStreamEvent, stream_completion};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::sync::Arc;

use ui::{ElevationIndex, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;

use crate::AllLanguageModelSettings;
use crate::provider::open_ai::{OpenAiEventMapper, into_open_ai};

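/// Settings for a single OpenAI-compatible provider: the base API URL and the
/// models the user has made available for it.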
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiCompatibleSettings {
    pub api_url: String,
    pub available_models: Vec<AvailableModel>,
}

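/// A model exposed by an OpenAI-compatible provider, as declared in the user's
/// settings. `max_tokens` is the model's context window; the output-token limits
/// are optional.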
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
}

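/// A language model provider that speaks the OpenAI chat completions protocol
/// against a user-configured endpoint.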
pub struct OpenAiCompatibleLanguageModelProvider {
    id: LanguageModelProviderId,
    name: LanguageModelProviderName,
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}

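/// Shared, observable state for the provider: the resolved settings, the API key
/// (if any), and whether that key came from an environment variable.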
pub struct State {
    id: Arc<str>,
    env_var_name: Arc<str>,
    api_key: Option<String>,
    api_key_from_env: bool,
    settings: OpenAiCompatibleSettings,
    _subscription: Subscription,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            credentials_provider
                .delete_credentials(&api_url, &cx)
                .await
                .log_err();
            this.update(cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

    fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            credentials_provider
                .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
                .await
                .log_err();
            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

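    /// Loads the API key, preferring the provider-specific environment variable
    /// over credentials stored for the configured API URL.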
    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        if self.is_authenticated() {
            return Task::ready(Ok(()));
        }

        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let env_var_name = self.env_var_name.clone();
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            let (api_key, from_env) = if let Ok(api_key) = std::env::var(env_var_name.as_ref()) {
                (api_key, true)
            } else {
                let (_, api_key) = credentials_provider
                    .read_credentials(&api_url, &cx)
                    .await?
                    .ok_or(AuthenticateError::CredentialsNotFound)?;
                (
                    String::from_utf8(api_key).context("invalid API key")?,
                    false,
                )
            };
            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                this.api_key_from_env = from_env;
                cx.notify();
            })?;

            Ok(())
        })
    }
}

impl OpenAiCompatibleLanguageModelProvider {
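    /// Creates a provider for the settings entry identified by `id`. The environment
    /// variable used for the API key is derived from the id as `<ID>_API_KEY` in
    /// constant case.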
    pub fn new(id: Arc<str>, http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> {
            AllLanguageModelSettings::get_global(cx)
                .openai_compatible
                .get(id)
        }

        let state = cx.new(|cx| State {
            id: id.clone(),
            env_var_name: format!("{}_API_KEY", id).to_case(Case::Constant).into(),
            settings: resolve_settings(&id, cx).cloned().unwrap_or_default(),
            api_key: None,
            api_key_from_env: false,
            _subscription: cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let Some(settings) = resolve_settings(&this.id, cx) else {
                    return;
                };
                if &this.settings != settings {
                    this.settings = settings.clone();
                    cx.notify();
                }
            }),
        });

        Self {
            id: id.clone().into(),
            name: id.into(),
            http_client,
            state,
        }
    }

    fn create_language_model(&self, model: AvailableModel) -> Arc<dyn LanguageModel> {
        Arc::new(OpenAiCompatibleLanguageModel {
            id: LanguageModelId::from(model.name.clone()),
            provider_id: self.id.clone(),
            provider_name: self.name.clone(),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        })
    }
}

impl LanguageModelProviderState for OpenAiCompatibleLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiCompatibleLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelProviderName {
        self.name.clone()
    }

    fn icon(&self) -> IconName {
        IconName::AiOpenAiCompat
    }

    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .first()
            .map(|model| self.create_language_model(model.clone()))
    }

    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        None
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .iter()
            .map(|model| self.create_language_model(model.clone()))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

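/// A single model served by an OpenAI-compatible endpoint. Requests are gated by a
/// small `RateLimiter` and use the API key held in the shared `State`.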
pub struct OpenAiCompatibleLanguageModel {
    id: LanguageModelId,
    provider_id: LanguageModelProviderId,
    provider_name: LanguageModelProviderName,
    model: AvailableModel,
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl OpenAiCompatibleLanguageModel {
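    /// Reads the API key and URL out of the shared state, then streams raw
    /// `ResponseStreamEvent`s from the endpoint through the request limiter.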
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();
        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, _| {
            (state.api_key.clone(), state.settings.api_url.clone())
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let provider = self.provider_name.clone();
        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey { provider });
            };
            let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for OpenAiCompatibleLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(
            self.model
                .display_name
                .clone()
                .unwrap_or_else(|| self.model.name.clone()),
        )
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        self.provider_id.clone()
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        self.provider_name.clone()
    }

    fn supports_tools(&self) -> bool {
        true
    }

    fn supports_images(&self) -> bool {
        false
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto => true,
            LanguageModelToolChoice::Any => true,
            LanguageModelToolChoice::None => true,
        }
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.name)
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_tokens
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens
    }

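    /// Approximates the token count locally with tiktoken, picking the tokenizer
    /// heuristically from the configured context window (see the comments below).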
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        let max_token_count = self.max_token_count();
        cx.background_spawn(async move {
            let messages = super::open_ai::collect_tiktoken_messages(request);
            let model = if max_token_count >= 100_000 {
                // If the context window is 100k or more, it is likely the o200k_base tokenizer from gpt-4o.
                "gpt-4o"
            } else {
                // Otherwise fall back to gpt-4, since only cl100k_base and o200k_base are
                // supported with this tiktoken method.
                "gpt-4"
            };
            tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let supports_parallel_tool_call = true;
        let supports_prompt_cache_key = false;
        let request = into_open_ai(
            request,
            &self.model.name,
            supports_parallel_tool_call,
            supports_prompt_cache_key,
            self.max_output_tokens(),
            None,
        );
        let completions = self.stream_completion(request, cx);
        async move {
            let mapper = OpenAiEventMapper::new();
            Ok(mapper.map_stream(completions.await?).boxed())
        }
        .boxed()
    }
}

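/// The settings UI for this provider: an API key input when unauthenticated, and a
/// summary row with a reset button once a key is configured.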
struct ConfigurationView {
    api_key_editor: Entity<SingleLineInput>,
    state: gpui::Entity<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor = cx.new(|cx| {
            SingleLineInput::new(
                window,
                cx,
                "000000000000000000000000000000000000000000000000000",
            )
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log an error, because "not signed in" is also an error.
                    let _ = task.await;
                }
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

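    /// Saves the key currently in the editor; invoked via the `menu::Confirm` action
    /// bound on the editor section in `render`.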
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self
            .api_key_editor
            .read(cx)
            .editor()
            .read(cx)
            .text(cx)
            .trim()
            .to_string();

        // Don't proceed if no API key is provided and we're not authenticated
        if api_key.is_empty() && !self.state.read(cx).is_authenticated() {
            return;
        }

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor.update(cx, |input, cx| {
            input.editor.update(cx, |editor, cx| {
                editor.set_text("", window, cx);
            });
        });

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state.update(cx, |state, cx| state.reset_api_key(cx))?.await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_from_env;
        let env_var_name = self.state.read(cx).env_var_name.clone();

        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's agent with an OpenAI-compatible provider, you need to add an API key."))
                .child(
                    div()
                        .pt(DynamicSpacing::Base04.rems(cx))
                        .child(self.api_key_editor.clone())
                )
                .child(
                    Label::new(
                        format!("You can also assign the {env_var_name} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small).color(Color::Muted),
                )
                .into_any()
        } else {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {env_var_name} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {env_var_name} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        };

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}