use anyhow::{Context as _, Result, anyhow};
use credentials_provider::CredentialsProvider;

use convert_case::{Case, Casing};
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
use http_client::HttpClient;
use language_model::{
    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
    LanguageModelToolChoice, LanguageModelToolSchemaFormat, RateLimiter,
};
use menu;
use open_ai::{ResponseStreamEvent, stream_completion};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::sync::Arc;

use ui::{ElevationIndex, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;

use crate::AllLanguageModelSettings;
use crate::provider::open_ai::{OpenAiEventMapper, into_open_ai};

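/// Connection settings for a single OpenAI-compatible provider: the endpoint
/// URL and the models exposed to the user.
///
/// A sketch of a matching settings entry (field names mirror this struct and
/// [`AvailableModel`]; the provider id and values below are placeholders, and
/// the exact location in settings.json may differ):
///
/// ```json
/// "openai_compatible": {
///     "my-provider": {
///         "api_url": "https://example.com/v1",
///         "available_models": [
///             { "name": "some-model", "max_tokens": 128000 }
///         ]
///     }
/// }
/// ```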
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiCompatibleSettings {
    pub api_url: String,
    pub available_models: Vec<AvailableModel>,
}

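/// A model entry configured by the user for this provider, along with its
/// context-window and output-token limits.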
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
    #[serde(default)]
    pub capabilities: ModelCapabilities,
}

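/// Optional capabilities a configured model supports. Defaults to tool calling
/// only; image input, parallel tool calls, and prompt cache keys must be opted
/// into per model.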
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct ModelCapabilities {
    pub tools: bool,
    pub images: bool,
    pub parallel_tool_calls: bool,
    pub prompt_cache_key: bool,
}

impl Default for ModelCapabilities {
    fn default() -> Self {
        Self {
            tools: true,
            images: false,
            parallel_tool_calls: false,
            prompt_cache_key: false,
        }
    }
}

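/// A [`LanguageModelProvider`] for any endpoint that speaks the OpenAI API,
/// identified by the provider id used in the user's settings.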
pub struct OpenAiCompatibleLanguageModelProvider {
    id: LanguageModelProviderId,
    name: LanguageModelProviderName,
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}

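/// Per-provider state: the resolved settings, the API key (if any), and a
/// subscription that keeps the settings in sync with the global settings store.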
pub struct State {
    id: Arc<str>,
    env_var_name: Arc<str>,
    api_key: Option<String>,
    api_key_from_env: bool,
    settings: OpenAiCompatibleSettings,
    _subscription: Subscription,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key.is_some()
    }

    fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            credentials_provider
                .delete_credentials(&api_url, cx)
                .await
                .log_err();
            this.update(cx, |this, cx| {
                this.api_key = None;
                this.api_key_from_env = false;
                cx.notify();
            })
        })
    }

    fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let api_url = self.settings.api_url.clone();
        cx.spawn(async move |this, cx| {
            credentials_provider
                .write_credentials(&api_url, "Bearer", api_key.as_bytes(), cx)
                .await
                .log_err();
            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                cx.notify();
            })
        })
    }

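    /// Loads the API key from the provider's environment variable if it is set,
    /// otherwise from the credentials store entry keyed by the API URL.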
    fn get_api_key(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let credentials_provider = <dyn CredentialsProvider>::global(cx);
        let env_var_name = self.env_var_name.clone();
        let api_url = self.settings.api_url.clone();
        let provider_id = self.id.clone();
        cx.spawn(async move |this, cx| {
            let (api_key, from_env) = if let Ok(api_key) = std::env::var(env_var_name.as_ref()) {
                (api_key, true)
            } else {
                let (_, api_key) = credentials_provider
                    .read_credentials(&api_url, cx)
                    .await?
                    .ok_or(AuthenticateError::CredentialsNotFound)?;
                (
                    String::from_utf8(api_key)
                        .with_context(|| format!("invalid {provider_id} API key"))?,
                    false,
                )
            };
            this.update(cx, |this, cx| {
                this.api_key = Some(api_key);
                this.api_key_from_env = from_env;
                cx.notify();
            })?;

            Ok(())
        })
    }

    fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        if self.is_authenticated() {
            return Task::ready(Ok(()));
        }

        self.get_api_key(cx)
    }
}

impl OpenAiCompatibleLanguageModelProvider {
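    /// Creates the provider for the settings entry `id` and a [`State`] entity
    /// that re-resolves its settings (and, if the API URL changes, its stored
    /// credentials) whenever the global settings store updates.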
    pub fn new(id: Arc<str>, http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> {
            AllLanguageModelSettings::get_global(cx)
                .openai_compatible
                .get(id)
        }

        let state = cx.new(|cx| State {
            id: id.clone(),
            env_var_name: format!("{}_API_KEY", id).to_case(Case::Constant).into(),
            settings: resolve_settings(&id, cx).cloned().unwrap_or_default(),
            api_key: None,
            api_key_from_env: false,
            _subscription: cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let Some(settings) = resolve_settings(&this.id, cx).cloned() else {
                    return;
                };
                if this.settings != settings {
                    // A changed API URL invalidates any key read from the credentials
                    // store, so attempt to load the key stored for the new URL.
                    if settings.api_url != this.settings.api_url && !this.api_key_from_env {
                        let spawn_task = cx.spawn(async move |handle, cx| {
                            if let Ok(task) = handle.update(cx, |this, cx| this.get_api_key(cx)) {
                                if task.await.is_err() {
                                    handle
                                        .update(cx, |this, _| {
                                            this.api_key = None;
                                            this.api_key_from_env = false;
                                        })
                                        .ok();
                                }
                            }
                        });
                        spawn_task.detach();
                    }

                    this.settings = settings;
                    cx.notify();
                }
            }),
        });

        Self {
            id: id.clone().into(),
            name: id.into(),
            http_client,
            state,
        }
    }

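    /// Wraps a configured model in an [`OpenAiCompatibleLanguageModel`] backed by
    /// this provider's state, HTTP client, and a per-model request rate limiter.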
    fn create_language_model(&self, model: AvailableModel) -> Arc<dyn LanguageModel> {
        Arc::new(OpenAiCompatibleLanguageModel {
            id: LanguageModelId::from(model.name.clone()),
            provider_id: self.id.clone(),
            provider_name: self.name.clone(),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        })
    }
}

impl LanguageModelProviderState for OpenAiCompatibleLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiCompatibleLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelProviderName {
        self.name.clone()
    }

    fn icon(&self) -> IconName {
        IconName::AiOpenAiCompat
    }

    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .first()
            .map(|model| self.create_language_model(model.clone()))
    }

    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        None
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .iter()
            .map(|model| self.create_language_model(model.clone()))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(
        &self,
        _target_agent: language_model::ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state.update(cx, |state, cx| state.reset_api_key(cx))
    }
}

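/// A single model served by an OpenAI-compatible endpoint.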
pub struct OpenAiCompatibleLanguageModel {
    id: LanguageModelId,
    provider_id: LanguageModelProviderId,
    provider_name: LanguageModelProviderName,
    model: AvailableModel,
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl OpenAiCompatibleLanguageModel {
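    /// Issues the completion request through the model's rate limiter and returns
    /// the raw response event stream, failing early if no API key is configured.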
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();
        let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, _| {
            (state.api_key.clone(), state.settings.api_url.clone())
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let provider = self.provider_name.clone();
        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey { provider });
            };
            let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for OpenAiCompatibleLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(
            self.model
                .display_name
                .clone()
                .unwrap_or_else(|| self.model.name.clone()),
        )
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        self.provider_id.clone()
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        self.provider_name.clone()
    }

    fn supports_tools(&self) -> bool {
        self.model.capabilities.tools
    }

    fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
        LanguageModelToolSchemaFormat::JsonSchemaSubset
    }

    fn supports_images(&self) -> bool {
        self.model.capabilities.images
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => {
                self.model.capabilities.tools
            }
            LanguageModelToolChoice::None => true,
        }
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.name)
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_tokens
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens
    }

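    /// Estimates token usage locally with tiktoken. The actual tokenizer used by
    /// an arbitrary OpenAI-compatible model is unknown, so this picks the closest
    /// supported OpenAI tokenizer based on the configured context-window size.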
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        let max_token_count = self.max_token_count();
        cx.background_spawn(async move {
            let messages = super::open_ai::collect_tiktoken_messages(request);
            let model = if max_token_count >= 100_000 {
                // If the context window is 100k tokens or more, the model most likely
                // uses the o200k_base tokenizer from gpt-4o.
                "gpt-4o"
            } else {
                // Otherwise fall back to gpt-4, since only cl100k_base and o200k_base
                // are supported by this tiktoken method.
                "gpt-4"
            };
            tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }

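    /// Converts the request into the OpenAI wire format, streams the completion,
    /// and maps the raw response events into [`LanguageModelCompletionEvent`]s.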
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let request = into_open_ai(
            request,
            &self.model.name,
            self.model.capabilities.parallel_tool_calls,
            self.model.capabilities.prompt_cache_key,
            self.max_output_tokens(),
            None,
        );
        let completions = self.stream_completion(request, cx);
        async move {
            let mapper = OpenAiEventMapper::new();
            Ok(mapper.map_stream(completions.await?).boxed())
        }
        .boxed()
    }
}

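/// Settings UI for entering, saving, and resetting this provider's API key.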
struct ConfigurationView {
    api_key_editor: Entity<SingleLineInput>,
    state: gpui::Entity<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor = cx.new(|cx| {
            SingleLineInput::new(
                window,
                cx,
                "000000000000000000000000000000000000000000000000000",
            )
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // Don't log the result: "not signed in" is reported as an error,
                    // and it's an expected state on first use.
                    let _ = task.await;
                }
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

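    /// Persists the API key currently entered in the editor when the user
    /// confirms the input.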
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self
            .api_key_editor
            .read(cx)
            .editor()
            .read(cx)
            .text(cx)
            .trim()
            .to_string();

        // Don't proceed if no API key is provided and we're not authenticated
        if api_key.is_empty() && !self.state.read(cx).is_authenticated() {
            return;
        }

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(api_key, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor.update(cx, |input, cx| {
            input.editor.update(cx, |editor, cx| {
                editor.set_text("", window, cx);
            });
        });

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state.update(cx, |state, cx| state.reset_api_key(cx))?.await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_from_env;
        let env_var_name = self.state.read(cx).env_var_name.clone();

        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's agent with an OpenAI-compatible provider, you need to add an API key."))
                .child(
                    div()
                        .pt(DynamicSpacing::Base04.rems(cx))
                        .child(self.api_key_editor.clone())
                )
                .child(
                    Label::new(
                        format!("You can also assign the {env_var_name} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small).color(Color::Muted),
                )
                .into_any()
        } else {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {env_var_name} environment variable.")
                        } else {
                            "API key configured.".to_string()
                        })),
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {env_var_name} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        };

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}