1use anyhow::Result;
2use collections::BTreeMap;
3use credentials_provider::CredentialsProvider;
4use futures::{FutureExt, StreamExt, future::BoxFuture};
5use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
6use http_client::HttpClient;
7use language_model::{
8 ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError,
9 LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider,
10 LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
11 LanguageModelRequest, LanguageModelToolChoice, RateLimiter, Role, env_var,
12};
13use opencode::{ApiProtocol, OPENCODE_API_URL};
14pub use settings::OpenCodeAvailableModel as AvailableModel;
15use settings::{Settings, SettingsStore};
16use std::sync::{Arc, LazyLock};
17use strum::IntoEnumIterator;
18use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
19use ui_input::InputField;
20use util::ResultExt;
21
22use crate::provider::anthropic::{AnthropicEventMapper, into_anthropic};
23use crate::provider::google::{GoogleEventMapper, into_google};
24use crate::provider::open_ai::{
25 OpenAiEventMapper, OpenAiResponseEventMapper, into_open_ai, into_open_ai_response,
26};
27
/// Stable identifier used to register this provider with the language model registry.
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("opencode");
/// Human-readable provider name shown in the UI and used in error messages.
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenCode Zen");

/// Environment variable that can supply the API key instead of the credential store.
const API_KEY_ENV_VAR_NAME: &str = "OPENCODE_API_KEY";
// Lazily read once; `ApiKeyState` consults this when no stored credential exists.
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
33
/// User-facing settings for the OpenCode Zen provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenCodeSettings {
    /// Base API URL; an empty string means "use the default OpenCode URL".
    pub api_url: String,
    /// Extra user-defined models exposed in addition to the built-in catalog.
    pub available_models: Vec<AvailableModel>,
}
39
/// Language model provider backed by the OpenCode Zen API.
pub struct OpenCodeLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    /// Shared authentication state, observed by the configuration UI.
    state: Entity<State>,
}
44
/// Shared provider state: the resolved API key plus the backend where
/// credentials are persisted.
pub struct State {
    api_key_state: ApiKeyState,
    credentials_provider: Arc<dyn CredentialsProvider>,
}
49
impl State {
    /// Whether an API key is currently available (from the credential store
    /// or the environment variable).
    fn is_authenticated(&self) -> bool {
        self.api_key_state.has_key()
    }

    /// Stores (or clears, when `api_key` is `None`) the API key for the
    /// currently configured API URL.
    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
        let credentials_provider = self.credentials_provider.clone();
        let api_url = OpenCodeLanguageModelProvider::api_url(cx);
        self.api_key_state.store(
            api_url,
            api_key,
            |this| &mut this.api_key_state,
            credentials_provider,
            cx,
        )
    }

    /// Loads the API key for the configured URL if it hasn't been loaded yet.
    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let credentials_provider = self.credentials_provider.clone();
        let api_url = OpenCodeLanguageModelProvider::api_url(cx);
        self.api_key_state.load_if_needed(
            api_url,
            |this| &mut this.api_key_state,
            credentials_provider,
            cx,
        )
    }
}
78
79impl OpenCodeLanguageModelProvider {
80 pub fn new(
81 http_client: Arc<dyn HttpClient>,
82 credentials_provider: Arc<dyn CredentialsProvider>,
83 cx: &mut App,
84 ) -> Self {
85 let state = cx.new(|cx| {
86 cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
87 let credentials_provider = this.credentials_provider.clone();
88 let api_url = Self::api_url(cx);
89 this.api_key_state.handle_url_change(
90 api_url,
91 |this| &mut this.api_key_state,
92 credentials_provider,
93 cx,
94 );
95 cx.notify();
96 })
97 .detach();
98 State {
99 api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
100 credentials_provider,
101 }
102 });
103
104 Self { http_client, state }
105 }
106
107 fn create_language_model(&self, model: opencode::Model) -> Arc<dyn LanguageModel> {
108 Arc::new(OpenCodeLanguageModel {
109 id: LanguageModelId::from(model.id().to_string()),
110 model,
111 state: self.state.clone(),
112 http_client: self.http_client.clone(),
113 request_limiter: RateLimiter::new(4),
114 })
115 }
116
117 pub fn settings(cx: &App) -> &OpenCodeSettings {
118 &crate::AllLanguageModelSettings::get_global(cx).opencode
119 }
120
121 fn api_url(cx: &App) -> SharedString {
122 let api_url = &Self::settings(cx).api_url;
123 if api_url.is_empty() {
124 OPENCODE_API_URL.into()
125 } else {
126 SharedString::new(api_url.as_str())
127 }
128 }
129}
130
impl LanguageModelProviderState for OpenCodeLanguageModelProvider {
    type ObservableEntity = State;

    /// Exposes the provider's [`State`] so observers can react to
    /// authentication changes.
    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
138
139impl LanguageModelProvider for OpenCodeLanguageModelProvider {
140 fn id(&self) -> LanguageModelProviderId {
141 PROVIDER_ID
142 }
143
144 fn name(&self) -> LanguageModelProviderName {
145 PROVIDER_NAME
146 }
147
148 fn icon(&self) -> IconOrSvg {
149 IconOrSvg::Icon(IconName::AiOpenCode)
150 }
151
152 fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
153 Some(self.create_language_model(opencode::Model::default()))
154 }
155
156 fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
157 Some(self.create_language_model(opencode::Model::default_fast()))
158 }
159
160 fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
161 let mut models = BTreeMap::default();
162
163 for model in opencode::Model::iter() {
164 if !matches!(model, opencode::Model::Custom { .. }) {
165 models.insert(model.id().to_string(), model);
166 }
167 }
168
169 for model in &Self::settings(cx).available_models {
170 let protocol = match model.protocol.as_str() {
171 "anthropic" => ApiProtocol::Anthropic,
172 "openai_responses" => ApiProtocol::OpenAiResponses,
173 "openai_chat" => ApiProtocol::OpenAiChat,
174 "google" => ApiProtocol::Google,
175 _ => ApiProtocol::OpenAiChat, // default fallback
176 };
177 models.insert(
178 model.name.clone(),
179 opencode::Model::Custom {
180 name: model.name.clone(),
181 display_name: model.display_name.clone(),
182 max_tokens: model.max_tokens,
183 max_output_tokens: model.max_output_tokens,
184 protocol,
185 },
186 );
187 }
188
189 models
190 .into_values()
191 .map(|model| self.create_language_model(model))
192 .collect()
193 }
194
195 fn is_authenticated(&self, cx: &App) -> bool {
196 self.state.read(cx).is_authenticated()
197 }
198
199 fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
200 self.state.update(cx, |state, cx| state.authenticate(cx))
201 }
202
203 fn configuration_view(
204 &self,
205 _target_agent: language_model::ConfigurationViewTargetAgent,
206 window: &mut Window,
207 cx: &mut App,
208 ) -> AnyView {
209 cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
210 .into()
211 }
212
213 fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
214 self.state
215 .update(cx, |state, cx| state.set_api_key(None, cx))
216 }
217}
218
/// A single OpenCode Zen model exposed through the [`LanguageModel`] trait.
pub struct OpenCodeLanguageModel {
    id: LanguageModelId,
    model: opencode::Model,
    /// Shared provider state, used to look up the API key per request.
    state: Entity<State>,
    http_client: Arc<dyn HttpClient>,
    /// Limits the number of concurrent requests to the service.
    request_limiter: RateLimiter,
}
226
impl OpenCodeLanguageModel {
    /// Returns the base API URL (e.g., "https://opencode.ai/zen").
    fn base_api_url(&self, cx: &AsyncApp) -> SharedString {
        self.state
            .read_with(cx, |_, cx| OpenCodeLanguageModelProvider::api_url(cx))
    }

    /// Returns the stored API key for the configured URL, or `None` when
    /// the provider is unauthenticated.
    fn api_key(&self, cx: &AsyncApp) -> Option<Arc<str>> {
        self.state.read_with(cx, |state, cx| {
            let api_url = OpenCodeLanguageModelProvider::api_url(cx);
            state.api_key_state.key(&api_url)
        })
    }

    /// Streams a completion over the Anthropic wire protocol.
    ///
    /// Fails with [`LanguageModelCompletionError::NoApiKey`] when no key is
    /// configured; the request is gated by `request_limiter`.
    fn stream_anthropic(
        &self,
        request: anthropic::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<anthropic::Event, anthropic::AnthropicError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let http_client = self.http_client.clone();
        // Anthropic crate appends /v1/messages to api_url
        let api_url = self.base_api_url(cx);
        let api_key = self.api_key(cx);

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = anthropic::stream_completion(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
                None,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Streams a completion over the OpenAI chat-completions protocol.
    fn stream_openai_chat(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<open_ai::ResponseStreamEvent>>>,
    > {
        let http_client = self.http_client.clone();
        // OpenAI crate appends /chat/completions to api_url, so we pass base + "/v1"
        let base_url = self.base_api_url(cx);
        let api_url: SharedString = format!("{base_url}/v1").into();
        let api_key = self.api_key(cx);
        let provider_name = PROVIDER_NAME.0.to_string();

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = open_ai::stream_completion(
                http_client.as_ref(),
                &provider_name,
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Streams a completion over the OpenAI Responses protocol.
    fn stream_openai_response(
        &self,
        request: open_ai::responses::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<open_ai::responses::StreamEvent>>>,
    > {
        let http_client = self.http_client.clone();
        // Responses crate appends /responses to api_url, so we pass base + "/v1"
        let base_url = self.base_api_url(cx);
        let api_url: SharedString = format!("{base_url}/v1").into();
        let api_key = self.api_key(cx);
        let provider_name = PROVIDER_NAME.0.to_string();

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = open_ai::responses::stream_response(
                http_client.as_ref(),
                &provider_name,
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Streams a completion over the Google generate-content protocol via
    /// the OpenCode Zen endpoint.
    fn stream_google_zen(
        &self,
        request: google_ai::GenerateContentRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<google_ai::GenerateContentResponse>>>,
    > {
        let http_client = self.http_client.clone();
        let api_url = self.base_api_url(cx);
        let api_key = self.api_key(cx);

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = opencode::stream_generate_content_zen(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}
381
impl LanguageModel for OpenCodeLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        self.model.supports_tools()
    }

    fn supports_images(&self) -> bool {
        self.model.supports_images()
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => true,
            LanguageModelToolChoice::None => {
                // Google models don't support None tool choice
                self.model.protocol() != ApiProtocol::Google
            }
        }
    }

    fn telemetry_id(&self) -> String {
        format!("opencode/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens()
    }

    /// Approximates the request's token count on a background thread.
    ///
    /// NOTE(review): this always uses tiktoken's "gpt-4o" tokenizer, even
    /// for Anthropic/Google-protocol models, so the result is an estimate
    /// rather than an exact count for non-OpenAI models.
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        cx.background_spawn(async move {
            let messages = request
                .messages
                .into_iter()
                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(message.string_contents()),
                    name: None,
                    function_call: None,
                })
                .collect::<Vec<_>>();

            tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }

    /// Streams a completion by dispatching on the model's wire protocol:
    /// the generic request is converted to the protocol-specific form, the
    /// matching `stream_*` helper issues it, and an event mapper translates
    /// the provider events into [`LanguageModelCompletionEvent`]s.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        match self.model.protocol() {
            ApiProtocol::Anthropic => {
                let anthropic_request = into_anthropic(
                    request,
                    self.model.id().to_string(),
                    1.0,
                    // Anthropic requires max_tokens; fall back to 8192 if unset.
                    self.model.max_output_tokens().unwrap_or(8192),
                    anthropic::AnthropicModelMode::Default,
                );
                let stream = self.stream_anthropic(anthropic_request, cx);
                async move {
                    let mapper = AnthropicEventMapper::new();
                    Ok(mapper.map_stream(stream.await?).boxed())
                }
                .boxed()
            }
            ApiProtocol::OpenAiChat => {
                let openai_request = into_open_ai(
                    request,
                    self.model.id(),
                    false,
                    false,
                    self.model.max_output_tokens(),
                    None,
                );
                let stream = self.stream_openai_chat(openai_request, cx);
                async move {
                    let mapper = OpenAiEventMapper::new();
                    Ok(mapper.map_stream(stream.await?).boxed())
                }
                .boxed()
            }
            ApiProtocol::OpenAiResponses => {
                let response_request = into_open_ai_response(
                    request,
                    self.model.id(),
                    false,
                    false,
                    self.model.max_output_tokens(),
                    None,
                );
                let stream = self.stream_openai_response(response_request, cx);
                async move {
                    let mapper = OpenAiResponseEventMapper::new();
                    Ok(mapper.map_stream(stream.await?).boxed())
                }
                .boxed()
            }
            ApiProtocol::Google => {
                let google_request = into_google(
                    request,
                    self.model.id().to_string(),
                    google_ai::GoogleModelMode::Default,
                );
                let stream = self.stream_google_zen(google_request, cx);
                async move {
                    let mapper = GoogleEventMapper::new();
                    Ok(mapper.map_stream(stream.await?.boxed()).boxed())
                }
                .boxed()
            }
        }
    }
}
533
/// UI for entering, saving, and resetting the OpenCode Zen API key.
struct ConfigurationView {
    api_key_editor: Entity<InputField>,
    state: Entity<State>,
    /// `Some` while stored credentials are still being loaded; the view
    /// renders a loading indicator until this becomes `None`.
    load_credentials_task: Option<Task<()>>,
}
539
540impl ConfigurationView {
541 fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
542 let api_key_editor = cx.new(|cx| {
543 InputField::new(window, cx, "sk-00000000000000000000000000000000").label("API key")
544 });
545
546 cx.observe(&state, |_, _, cx| {
547 cx.notify();
548 })
549 .detach();
550
551 let load_credentials_task = Some(cx.spawn_in(window, {
552 let state = state.clone();
553 async move |this, cx| {
554 if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
555 let _ = task.await;
556 }
557 this.update(cx, |this, cx| {
558 this.load_credentials_task = None;
559 cx.notify();
560 })
561 .log_err();
562 }
563 }));
564
565 Self {
566 api_key_editor,
567 state,
568 load_credentials_task,
569 }
570 }
571
572 fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
573 let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
574 if api_key.is_empty() {
575 return;
576 }
577
578 self.api_key_editor
579 .update(cx, |editor, cx| editor.set_text("", window, cx));
580
581 let state = self.state.clone();
582 cx.spawn_in(window, async move |_, cx| {
583 state
584 .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
585 .await
586 })
587 .detach_and_log_err(cx);
588 }
589
590 fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
591 self.api_key_editor
592 .update(cx, |editor, cx| editor.set_text("", window, cx));
593
594 let state = self.state.clone();
595 cx.spawn_in(window, async move |_, cx| {
596 state
597 .update(cx, |state, cx| state.set_api_key(None, cx))
598 .await
599 })
600 .detach_and_log_err(cx);
601 }
602
603 fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
604 !self.state.read(cx).is_authenticated()
605 }
606}
607
impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        // Describe where the configured key came from: the env var, the
        // default endpoint, or a custom endpoint (shown with its URL).
        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();
        let configured_card_label = if env_var_set {
            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable")
        } else {
            let api_url = OpenCodeLanguageModelProvider::api_url(cx);
            if api_url == OPENCODE_API_URL {
                "API key configured".to_string()
            } else {
                format!("API key configured for {}", api_url)
            }
        };

        // Unauthenticated: instructions + key editor. Authenticated: a card
        // showing the configured key, with reset disabled when the key comes
        // from the environment (we can't unset an env var for the user).
        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new(
                    "To use OpenCode Zen models in Zed, you need an API key:",
                ))
                .child(
                    List::new()
                        .child(
                            ListBulletItem::new("")
                                .child(Label::new("Sign in and get your key at"))
                                .child(ButtonLink::new(
                                    "OpenCode Zen Console",
                                    "https://opencode.ai/zen",
                                )),
                        )
                        .child(ListBulletItem::new(
                            "Paste your API key below and hit enter to start using OpenCode Zen",
                        )),
                )
                .child(self.api_key_editor.clone())
                .child(
                    Label::new(format!(
                        "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed."
                    ))
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any_element()
        } else {
            ConfiguredApiCard::new(configured_card_label)
                .disabled(env_var_set)
                .when(env_var_set, |this| {
                    this.tooltip_label(format!(
                        "To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable."
                    ))
                })
                .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx)))
                .into_any_element()
        };

        // Show a placeholder until the credential-loading task finishes.
        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}