1use anyhow::Result;
2use collections::BTreeMap;
3use credentials_provider::CredentialsProvider;
4use futures::{FutureExt, StreamExt, future::BoxFuture};
5use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
6use http_client::HttpClient;
7use language_model::{
8 ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError,
9 LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider,
10 LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
11 LanguageModelRequest, LanguageModelToolChoice, RateLimiter, Role, env_var,
12};
13use opencode::{ApiProtocol, OPENCODE_API_URL};
14pub use settings::OpenCodeAvailableModel as AvailableModel;
15use settings::{Settings, SettingsStore};
16use std::sync::{Arc, LazyLock};
17use strum::IntoEnumIterator;
18use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
19use ui_input::InputField;
20use util::ResultExt;
21
22use crate::provider::anthropic::{AnthropicEventMapper, into_anthropic};
23use crate::provider::google::{GoogleEventMapper, into_google};
24use crate::provider::open_ai::{
25 OpenAiEventMapper, OpenAiResponseEventMapper, into_open_ai, into_open_ai_response,
26};
27
/// Stable machine identifier for this provider (keys settings and credentials).
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("opencode");
/// Human-readable provider name shown in the UI.
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenCode Zen");

/// Environment variable consulted for an API key before the credential store.
const API_KEY_ENV_VAR_NAME: &str = "OPENCODE_API_KEY";
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
33
/// User-configurable settings for the OpenCode Zen provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenCodeSettings {
    // Base API URL; when empty, `OPENCODE_API_URL` is used instead.
    pub api_url: String,
    // Extra models declared in settings; merged with (and able to override)
    // the built-in model list in `provided_models`.
    pub available_models: Vec<AvailableModel>,
}
39
/// Language-model provider that exposes OpenCode Zen models to the
/// application's language-model framework.
pub struct OpenCodeLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    // Shared, observable authentication state (API key + credential store).
    state: Entity<State>,
}
44
/// Observable authentication state for the provider.
pub struct State {
    // Tracks the API key associated with the currently configured API URL.
    api_key_state: ApiKeyState,
    // Backing store used to persist and retrieve the credential.
    credentials_provider: Arc<dyn CredentialsProvider>,
}
49
50impl State {
51 fn is_authenticated(&self) -> bool {
52 self.api_key_state.has_key()
53 }
54
55 fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
56 let credentials_provider = self.credentials_provider.clone();
57 let api_url = OpenCodeLanguageModelProvider::api_url(cx);
58 self.api_key_state.store(
59 api_url,
60 api_key,
61 |this| &mut this.api_key_state,
62 credentials_provider,
63 cx,
64 )
65 }
66
67 fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
68 let credentials_provider = self.credentials_provider.clone();
69 let api_url = OpenCodeLanguageModelProvider::api_url(cx);
70 self.api_key_state.load_if_needed(
71 api_url,
72 |this| &mut this.api_key_state,
73 credentials_provider,
74 cx,
75 )
76 }
77}
78
impl OpenCodeLanguageModelProvider {
    /// Creates the provider and its observable [`State`].
    ///
    /// Subscribes to global settings changes so that an edited API URL causes
    /// the stored key to be re-resolved for the new URL.
    pub fn new(
        http_client: Arc<dyn HttpClient>,
        credentials_provider: Arc<dyn CredentialsProvider>,
        cx: &mut App,
    ) -> Self {
        let state = cx.new(|cx| {
            cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let credentials_provider = this.credentials_provider.clone();
                let api_url = Self::api_url(cx);
                // Re-key the credential if the API URL changed in settings.
                this.api_key_state.handle_url_change(
                    api_url,
                    |this| &mut this.api_key_state,
                    credentials_provider,
                    cx,
                );
                cx.notify();
            })
            .detach();
            State {
                api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
                credentials_provider,
            }
        });

        Self { http_client, state }
    }

    /// Wraps an `opencode::Model` in a [`LanguageModel`] handle that shares
    /// this provider's HTTP client and authentication state.
    fn create_language_model(&self, model: opencode::Model) -> Arc<dyn LanguageModel> {
        Arc::new(OpenCodeLanguageModel {
            id: LanguageModelId::from(model.id().to_string()),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            // Cap concurrent in-flight requests per model at 4.
            request_limiter: RateLimiter::new(4),
        })
    }

    /// Returns the OpenCode section of the global language-model settings.
    pub fn settings(cx: &App) -> &OpenCodeSettings {
        &crate::AllLanguageModelSettings::get_global(cx).opencode
    }

    /// The effective API URL: the configured value, or the crate default
    /// `OPENCODE_API_URL` when the setting is empty.
    fn api_url(cx: &App) -> SharedString {
        let api_url = &Self::settings(cx).api_url;
        if api_url.is_empty() {
            OPENCODE_API_URL.into()
        } else {
            SharedString::new(api_url.as_str())
        }
    }
}
130
impl LanguageModelProviderState for OpenCodeLanguageModelProvider {
    type ObservableEntity = State;

    /// Exposes the auth [`State`] so observers can react to key changes.
    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
138
139impl LanguageModelProvider for OpenCodeLanguageModelProvider {
140 fn id(&self) -> LanguageModelProviderId {
141 PROVIDER_ID
142 }
143
144 fn name(&self) -> LanguageModelProviderName {
145 PROVIDER_NAME
146 }
147
148 fn icon(&self) -> IconOrSvg {
149 IconOrSvg::Icon(IconName::AiOpenCode)
150 }
151
152 fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
153 Some(self.create_language_model(opencode::Model::default()))
154 }
155
156 fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
157 Some(self.create_language_model(opencode::Model::default_fast()))
158 }
159
160 fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
161 let mut models = BTreeMap::default();
162
163 for model in opencode::Model::iter() {
164 if !matches!(model, opencode::Model::Custom { .. }) {
165 models.insert(model.id().to_string(), model);
166 }
167 }
168
169 for model in &Self::settings(cx).available_models {
170 let protocol = match model.protocol.as_str() {
171 "anthropic" => ApiProtocol::Anthropic,
172 "openai_responses" => ApiProtocol::OpenAiResponses,
173 "openai_chat" => ApiProtocol::OpenAiChat,
174 "google" => ApiProtocol::Google,
175 _ => ApiProtocol::OpenAiChat, // default fallback
176 };
177 models.insert(
178 model.name.clone(),
179 opencode::Model::Custom {
180 name: model.name.clone(),
181 display_name: model.display_name.clone(),
182 max_tokens: model.max_tokens,
183 max_output_tokens: model.max_output_tokens,
184 protocol,
185 },
186 );
187 }
188
189 models
190 .into_values()
191 .map(|model| self.create_language_model(model))
192 .collect()
193 }
194
195 fn is_authenticated(&self, cx: &App) -> bool {
196 self.state.read(cx).is_authenticated()
197 }
198
199 fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
200 self.state.update(cx, |state, cx| state.authenticate(cx))
201 }
202
203 fn configuration_view(
204 &self,
205 _target_agent: language_model::ConfigurationViewTargetAgent,
206 window: &mut Window,
207 cx: &mut App,
208 ) -> AnyView {
209 cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
210 .into()
211 }
212
213 fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
214 self.state
215 .update(cx, |state, cx| state.set_api_key(None, cx))
216 }
217}
218
/// A single OpenCode Zen model, streaming completions over one of several
/// wire protocols depending on `model.protocol()`.
pub struct OpenCodeLanguageModel {
    id: LanguageModelId,
    model: opencode::Model,
    // Shared auth state; consulted per-request for the current API key.
    state: Entity<State>,
    http_client: Arc<dyn HttpClient>,
    // Bounds the number of concurrent requests for this model.
    request_limiter: RateLimiter,
}
226
impl OpenCodeLanguageModel {
    /// Returns the base API URL (e.g., "https://opencode.ai/zen").
    fn base_api_url(&self, cx: &AsyncApp) -> SharedString {
        self.state
            .read_with(cx, |_, cx| OpenCodeLanguageModelProvider::api_url(cx))
    }

    /// The API key for the current API URL, if one is configured.
    fn api_key(&self, cx: &AsyncApp) -> Option<Arc<str>> {
        self.state.read_with(cx, |state, cx| {
            let api_url = OpenCodeLanguageModelProvider::api_url(cx);
            state.api_key_state.key(&api_url)
        })
    }

    /// Streams a completion using the Anthropic wire protocol.
    ///
    /// Resolves to `NoApiKey` when no key is configured; otherwise yields the
    /// raw Anthropic event stream (mapped by the caller).
    fn stream_anthropic(
        &self,
        request: anthropic::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<anthropic::Event, anthropic::AnthropicError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let http_client = self.http_client.clone();
        // Anthropic crate appends /v1/messages to api_url
        let api_url = self.base_api_url(cx);
        let api_key = self.api_key(cx);

        // Rate-limit the request alongside the model's other in-flight calls.
        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = anthropic::stream_completion(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
                None,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Streams a completion using the OpenAI chat-completions protocol.
    fn stream_openai_chat(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<open_ai::ResponseStreamEvent>>>,
    > {
        let http_client = self.http_client.clone();
        // OpenAI crate appends /chat/completions to api_url, so we pass base + "/v1"
        let base_url = self.base_api_url(cx);
        let api_url: SharedString = format!("{base_url}/v1").into();
        let api_key = self.api_key(cx);
        let provider_name = PROVIDER_NAME.0.to_string();

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = open_ai::stream_completion(
                http_client.as_ref(),
                &provider_name,
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Streams a completion using the OpenAI Responses protocol.
    fn stream_openai_response(
        &self,
        request: open_ai::responses::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<open_ai::responses::StreamEvent>>>,
    > {
        let http_client = self.http_client.clone();
        // Responses crate appends /responses to api_url, so we pass base + "/v1"
        let base_url = self.base_api_url(cx);
        let api_url: SharedString = format!("{base_url}/v1").into();
        let api_key = self.api_key(cx);
        let provider_name = PROVIDER_NAME.0.to_string();

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = open_ai::responses::stream_response(
                http_client.as_ref(),
                &provider_name,
                &api_url,
                &api_key,
                request,
                vec![],
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Streams a completion using the Google protocol via OpenCode's
    /// Zen-specific endpoint (`stream_generate_content_zen`).
    fn stream_google_zen(
        &self,
        request: google_ai::GenerateContentRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<google_ai::GenerateContentResponse>>>,
    > {
        let http_client = self.http_client.clone();
        let api_url = self.base_api_url(cx);
        let api_key = self.api_key(cx);

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = opencode::stream_generate_content_zen(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}
382
impl LanguageModel for OpenCodeLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        self.model.supports_tools()
    }

    fn supports_images(&self) -> bool {
        self.model.supports_images()
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => true,
            LanguageModelToolChoice::None => {
                // Google models don't support None tool choice
                self.model.protocol() != ApiProtocol::Google
            }
        }
    }

    fn telemetry_id(&self) -> String {
        format!("opencode/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens()
    }

    /// Estimates the request's token count on a background thread.
    ///
    /// NOTE(review): the count is approximated with the gpt-4o tokenizer for
    /// every model, regardless of protocol — confirm this is intentional.
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        cx.background_spawn(async move {
            let messages = request
                .messages
                .into_iter()
                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(message.string_contents()),
                    name: None,
                    function_call: None,
                })
                .collect::<Vec<_>>();

            tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }

    /// Dispatches on the model's wire protocol, converts the generic request
    /// into that protocol's request type, and maps the protocol-specific event
    /// stream into unified [`LanguageModelCompletionEvent`]s.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        match self.model.protocol() {
            ApiProtocol::Anthropic => {
                let anthropic_request = into_anthropic(
                    request,
                    self.model.id().to_string(),
                    1.0,
                    // Fall back to 8192 output tokens when the model doesn't
                    // declare a limit.
                    self.model.max_output_tokens().unwrap_or(8192),
                    anthropic::AnthropicModelMode::Default,
                );
                let stream = self.stream_anthropic(anthropic_request, cx);
                async move {
                    let mapper = AnthropicEventMapper::new();
                    Ok(mapper.map_stream(stream.await?).boxed())
                }
                .boxed()
            }
            ApiProtocol::OpenAiChat => {
                let openai_request = into_open_ai(
                    request,
                    self.model.id(),
                    false,
                    false,
                    self.model.max_output_tokens(),
                    None,
                );
                let stream = self.stream_openai_chat(openai_request, cx);
                async move {
                    let mapper = OpenAiEventMapper::new();
                    Ok(mapper.map_stream(stream.await?).boxed())
                }
                .boxed()
            }
            ApiProtocol::OpenAiResponses => {
                let response_request = into_open_ai_response(
                    request,
                    self.model.id(),
                    false,
                    false,
                    self.model.max_output_tokens(),
                    None,
                );
                let stream = self.stream_openai_response(response_request, cx);
                async move {
                    let mapper = OpenAiResponseEventMapper::new();
                    Ok(mapper.map_stream(stream.await?).boxed())
                }
                .boxed()
            }
            ApiProtocol::Google => {
                let google_request = into_google(
                    request,
                    self.model.id().to_string(),
                    google_ai::GoogleModelMode::Default,
                );
                let stream = self.stream_google_zen(google_request, cx);
                async move {
                    let mapper = GoogleEventMapper::new();
                    Ok(mapper.map_stream(stream.await?.boxed()).boxed())
                }
                .boxed()
            }
        }
    }
}
534
/// Settings-panel view for entering, saving, and resetting the API key.
struct ConfigurationView {
    api_key_editor: Entity<InputField>,
    state: Entity<State>,
    // While `Some`, credentials are still loading and a placeholder is shown.
    load_credentials_task: Option<Task<()>>,
}
540
541impl ConfigurationView {
542 fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
543 let api_key_editor = cx.new(|cx| {
544 InputField::new(window, cx, "sk-00000000000000000000000000000000").label("API key")
545 });
546
547 cx.observe(&state, |_, _, cx| {
548 cx.notify();
549 })
550 .detach();
551
552 let load_credentials_task = Some(cx.spawn_in(window, {
553 let state = state.clone();
554 async move |this, cx| {
555 if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
556 let _ = task.await;
557 }
558 this.update(cx, |this, cx| {
559 this.load_credentials_task = None;
560 cx.notify();
561 })
562 .log_err();
563 }
564 }));
565
566 Self {
567 api_key_editor,
568 state,
569 load_credentials_task,
570 }
571 }
572
573 fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
574 let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
575 if api_key.is_empty() {
576 return;
577 }
578
579 self.api_key_editor
580 .update(cx, |editor, cx| editor.set_text("", window, cx));
581
582 let state = self.state.clone();
583 cx.spawn_in(window, async move |_, cx| {
584 state
585 .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
586 .await
587 })
588 .detach_and_log_err(cx);
589 }
590
591 fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
592 self.api_key_editor
593 .update(cx, |editor, cx| editor.set_text("", window, cx));
594
595 let state = self.state.clone();
596 cx.spawn_in(window, async move |_, cx| {
597 state
598 .update(cx, |state, cx| state.set_api_key(None, cx))
599 .await
600 })
601 .detach_and_log_err(cx);
602 }
603
604 fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
605 !self.state.read(cx).is_authenticated()
606 }
607}
608
impl Render for ConfigurationView {
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        // Keys sourced from the environment variable cannot be reset in the UI.
        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();
        let configured_card_label = if env_var_set {
            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable")
        } else {
            // Mention the URL only when it differs from the default endpoint.
            let api_url = OpenCodeLanguageModelProvider::api_url(cx);
            if api_url == OPENCODE_API_URL {
                "API key configured".to_string()
            } else {
                format!("API key configured for {}", api_url)
            }
        };

        let api_key_section = if self.should_render_editor(cx) {
            // Unauthenticated: sign-up instructions plus the key input field.
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new(
                    "To use OpenCode Zen models in Zed, you need an API key:",
                ))
                .child(
                    List::new()
                        .child(
                            ListBulletItem::new("")
                                .child(Label::new("Sign in and get your key at"))
                                .child(ButtonLink::new(
                                    "OpenCode Zen Console",
                                    "https://opencode.ai/zen",
                                )),
                        )
                        .child(ListBulletItem::new(
                            "Paste your API key below and hit enter to start using OpenCode Zen",
                        )),
                )
                .child(self.api_key_editor.clone())
                .child(
                    Label::new(format!(
                        "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed."
                    ))
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any_element()
        } else {
            // Authenticated: configured card; clicking resets the key unless
            // it came from the environment variable (then the card is disabled
            // and a tooltip explains how to unset it).
            ConfiguredApiCard::new(configured_card_label)
                .disabled(env_var_set)
                .when(env_var_set, |this| {
                    this.tooltip_label(format!(
                        "To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable."
                    ))
                })
                .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx)))
                .into_any_element()
        };

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}