use anyhow::{Result, anyhow};
use convert_case::{Case, Casing};
use futures::{FutureExt, StreamExt, future, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
use language_model::{
    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
    LanguageModelToolChoice, LanguageModelToolSchemaFormat, RateLimiter,
};
use menu;
use open_ai::{ResponseStreamEvent, stream_completion};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use std::sync::Arc;
use ui::{ElevationIndex, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::{ResultExt, truncate_and_trailoff};
use zed_env_vars::EnvVar;

use crate::api_key::ApiKeyState;
use crate::provider::open_ai::{OpenAiEventMapper, into_open_ai};

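/// Settings for a single OpenAI-compatible provider entry, resolved from the
/// `openai_compatible` map in `AllLanguageModelSettings`, keyed by provider id.
///
/// A minimal sketch of a populated value, using only types from this file
/// (the URL and model names are illustrative placeholders, not real defaults):
///
/// ```ignore
/// let settings = OpenAiCompatibleSettings {
///     api_url: "https://example.com/v1".to_string(),
///     available_models: vec![AvailableModel {
///         name: "my-model".to_string(),
///         display_name: Some("My Model".to_string()),
///         max_tokens: 128_000,
///         max_output_tokens: None,
///         max_completion_tokens: None,
///         capabilities: ModelCapabilities::default(),
///     }],
/// };
/// ```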
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiCompatibleSettings {
    pub api_url: String,
    pub available_models: Vec<AvailableModel>,
}

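/// A model entry exposed by an OpenAI-compatible provider.
///
/// `name` is the model identifier sent to the API, `display_name` (if set) is what
/// the UI shows, and the token fields bound the context window and output length.
/// `capabilities` falls back to [`ModelCapabilities::default()`] when omitted.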
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct AvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
    #[serde(default)]
    pub capabilities: ModelCapabilities,
}

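/// Optional API features a configured model supports. The `Default` impl below
/// enables only tool calls; image input, parallel tool calls, and `prompt_cache_key`
/// support must be opted into explicitly.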
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct ModelCapabilities {
    pub tools: bool,
    pub images: bool,
    pub parallel_tool_calls: bool,
    pub prompt_cache_key: bool,
}

impl Default for ModelCapabilities {
    fn default() -> Self {
        Self {
            tools: true,
            images: false,
            parallel_tool_calls: false,
            prompt_cache_key: false,
        }
    }
}

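/// Language model provider for user-defined OpenAI-compatible endpoints, reusing the
/// OpenAI request/response plumbing (`into_open_ai`, `OpenAiEventMapper`).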
pub struct OpenAiCompatibleLanguageModelProvider {
    id: LanguageModelProviderId,
    name: LanguageModelProviderName,
    http_client: Arc<dyn HttpClient>,
    state: gpui::Entity<State>,
}

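/// Per-provider state shared by all models of one configured endpoint: the current
/// settings plus the API key, which is loaded either from the `{PROVIDER_ID}_API_KEY`
/// environment variable (the provider id converted to upper snake case) or via
/// [`ApiKeyState`].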
pub struct State {
    id: Arc<str>,
    api_key_env_var: EnvVar,
    api_key_state: ApiKeyState,
    settings: OpenAiCompatibleSettings,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key_state.has_key()
    }

    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
        let api_url = SharedString::new(self.settings.api_url.as_str());
        self.api_key_state
            .store(api_url, api_key, |this| &mut this.api_key_state, cx)
    }

    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let api_url = SharedString::new(self.settings.api_url.clone());
        self.api_key_state.load_if_needed(
            api_url,
            &self.api_key_env_var,
            |this| &mut this.api_key_state,
            cx,
        )
    }
}

impl OpenAiCompatibleLanguageModelProvider {
    pub fn new(id: Arc<str>, http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> {
            crate::AllLanguageModelSettings::get_global(cx)
                .openai_compatible
                .get(id)
        }

        let api_key_env_var_name = format!("{}_API_KEY", id).to_case(Case::UpperSnake).into();
        let state = cx.new(|cx| {
            cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let Some(settings) = resolve_settings(&this.id, cx).cloned() else {
                    return;
                };
                if this.settings != settings {
                    let api_url = SharedString::new(settings.api_url.as_str());
                    this.api_key_state.handle_url_change(
                        api_url,
                        &this.api_key_env_var,
                        |this| &mut this.api_key_state,
                        cx,
                    );
                    this.settings = settings;
                    cx.notify();
                }
            })
            .detach();
            let settings = resolve_settings(&id, cx).cloned().unwrap_or_default();
            State {
                id: id.clone(),
                api_key_env_var: EnvVar::new(api_key_env_var_name),
                api_key_state: ApiKeyState::new(SharedString::new(settings.api_url.as_str())),
                settings,
            }
        });

        Self {
            id: id.clone().into(),
            name: id.into(),
            http_client,
            state,
        }
    }

    fn create_language_model(&self, model: AvailableModel) -> Arc<dyn LanguageModel> {
        Arc::new(OpenAiCompatibleLanguageModel {
            id: LanguageModelId::from(model.name.clone()),
            provider_id: self.id.clone(),
            provider_name: self.name.clone(),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        })
    }
}

impl LanguageModelProviderState for OpenAiCompatibleLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for OpenAiCompatibleLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelProviderName {
        self.name.clone()
    }

    fn icon(&self) -> IconName {
        IconName::AiOpenAiCompat
    }

    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .first()
            .map(|model| self.create_language_model(model.clone()))
    }

    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        None
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        self.state
            .read(cx)
            .settings
            .available_models
            .iter()
            .map(|model| self.create_language_model(model.clone()))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(
        &self,
        _target_agent: language_model::ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state
            .update(cx, |state, cx| state.set_api_key(None, cx))
    }
}

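/// One configured model from an OpenAI-compatible provider. Holds the shared provider
/// [`State`] for API-key and URL lookup, plus a [`RateLimiter`] (constructed with
/// `RateLimiter::new(4)` in `create_language_model`) that throttles requests.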
pub struct OpenAiCompatibleLanguageModel {
    id: LanguageModelId,
    provider_id: LanguageModelProviderId,
    provider_name: LanguageModelProviderName,
    model: AvailableModel,
    state: gpui::Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl OpenAiCompatibleLanguageModel {
    fn stream_completion(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
    {
        let http_client = self.http_client.clone();

        let Ok((api_key, api_url)) = self.state.read_with(cx, |state, _cx| {
            let api_url = &state.settings.api_url;
            (
                state.api_key_state.key(api_url),
                state.settings.api_url.clone(),
            )
        }) else {
            return future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let provider = self.provider_name.clone();
        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey { provider });
            };
            let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for OpenAiCompatibleLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(
            self.model
                .display_name
                .clone()
                .unwrap_or_else(|| self.model.name.clone()),
        )
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        self.provider_id.clone()
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        self.provider_name.clone()
    }

    fn supports_tools(&self) -> bool {
        self.model.capabilities.tools
    }

    fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
        LanguageModelToolSchemaFormat::JsonSchemaSubset
    }

    fn supports_images(&self) -> bool {
        self.model.capabilities.images
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => {
                self.model.capabilities.tools
            }
            LanguageModelToolChoice::None => true,
        }
    }

    fn telemetry_id(&self) -> String {
        format!("openai/{}", self.model.name)
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_tokens
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        let max_token_count = self.max_token_count();
        cx.background_spawn(async move {
            let messages = super::open_ai::collect_tiktoken_messages(request);
            let model = if max_token_count >= 100_000 {
                // A context window of 100k tokens or more most likely means the
                // o200k_base tokenizer used by gpt-4o.
                "gpt-4o"
            } else {
                // Otherwise fall back to gpt-4, since this tiktoken method only
                // supports the cl100k_base and o200k_base tokenizers.
                "gpt-4"
            };
            tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let request = into_open_ai(
            request,
            &self.model.name,
            self.model.capabilities.parallel_tool_calls,
            self.model.capabilities.prompt_cache_key,
            self.max_output_tokens(),
            None,
        );
        let completions = self.stream_completion(request, cx);
        async move {
            let mapper = OpenAiEventMapper::new();
            Ok(mapper.map_stream(completions.await?).boxed())
        }
        .boxed()
    }
}

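/// Settings UI for an OpenAI-compatible provider: shows an API-key input while
/// unauthenticated, and a summary row with a "Reset API Key" button once a key is
/// configured or detected in the environment variable.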
struct ConfigurationView {
    api_key_editor: Entity<SingleLineInput>,
    state: gpui::Entity<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor = cx.new(|cx| {
            SingleLineInput::new(
                window,
                cx,
                "000000000000000000000000000000000000000000000000000",
            )
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    // We don't log the result here, because "not signed in" is also
                    // reported as an error.
                    let _ = task.await;
                }
                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
        if api_key.is_empty() {
            return;
        }

        // Clear the editor, since URL changes can cause it to be displayed again.
        self.api_key_editor
            .update(cx, |input, cx| input.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |input, cx| input.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(None, cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    fn should_render_editor(&self, cx: &Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let state = self.state.read(cx);
        let env_var_set = state.api_key_state.is_from_env_var();
        let env_var_name = &state.api_key_env_var.name;

        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's agent with an OpenAI-compatible provider, you need to add an API key."))
                .child(
                    div()
                        .pt(DynamicSpacing::Base04.rems(cx))
                        .child(self.api_key_editor.clone()),
                )
                .child(
                    Label::new(
                        format!("You can also assign the {env_var_name} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any()
        } else {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new(if env_var_set {
                            format!("API key set in {env_var_name} environment variable")
                        } else {
                            format!("API key configured for {}", truncate_and_trailoff(&state.settings.api_url, 32))
                        })),
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {env_var_name} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
                .into_any()
        };

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}