opencode.rs

  1use anyhow::Result;
  2use collections::BTreeMap;
  3use futures::{FutureExt, StreamExt, future::BoxFuture};
  4use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
  5use http_client::HttpClient;
  6use language_model::{
  7    ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError,
  8    LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider,
  9    LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
 10    LanguageModelRequest, LanguageModelToolChoice, RateLimiter, Role, env_var,
 11};
 12use opencode::{ApiProtocol, OPENCODE_API_URL};
 13pub use settings::OpenCodeAvailableModel as AvailableModel;
 14use settings::{Settings, SettingsStore};
 15use std::sync::{Arc, LazyLock};
 16use strum::IntoEnumIterator;
 17use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 18use ui_input::InputField;
 19use util::ResultExt;
 20
 21use crate::provider::anthropic::{AnthropicEventMapper, into_anthropic};
 22use crate::provider::google::{GoogleEventMapper, into_google};
 23use crate::provider::open_ai::{
 24    OpenAiEventMapper, OpenAiResponseEventMapper, into_open_ai, into_open_ai_response,
 25};
 26
/// Stable identifier used to register this provider with the language model registry.
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("opencode");
/// Human-readable provider name shown in the UI and used in error messages.
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenCode Zen");

/// Environment variable consulted for an API key before any stored credential.
const API_KEY_ENV_VAR_NAME: &str = "OPENCODE_API_KEY";
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
 32
/// User-configurable settings for the OpenCode Zen provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenCodeSettings {
    // Base API URL; an empty string means "use the default OPENCODE_API_URL"
    // (see `OpenCodeLanguageModelProvider::api_url`).
    pub api_url: String,
    // Extra user-defined models, merged with the built-in model list in
    // `provided_models`.
    pub available_models: Vec<AvailableModel>,
}
 38
/// Language model provider for OpenCode Zen; owns the HTTP client and the
/// shared authentication `State` entity.
pub struct OpenCodeLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: Entity<State>,
}
 43
/// Observable authentication state: tracks the API key associated with the
/// currently configured API URL.
pub struct State {
    api_key_state: ApiKeyState,
}
 47
 48impl State {
 49    fn is_authenticated(&self) -> bool {
 50        self.api_key_state.has_key()
 51    }
 52
 53    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
 54        let api_url = OpenCodeLanguageModelProvider::api_url(cx);
 55        self.api_key_state
 56            .store(api_url, api_key, |this| &mut this.api_key_state, cx)
 57    }
 58
 59    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
 60        let api_url = OpenCodeLanguageModelProvider::api_url(cx);
 61        self.api_key_state
 62            .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
 63    }
 64}
 65
 66impl OpenCodeLanguageModelProvider {
 67    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
 68        let state = cx.new(|cx| {
 69            cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
 70                let api_url = Self::api_url(cx);
 71                this.api_key_state
 72                    .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
 73                cx.notify();
 74            })
 75            .detach();
 76            State {
 77                api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
 78            }
 79        });
 80
 81        Self { http_client, state }
 82    }
 83
 84    fn create_language_model(&self, model: opencode::Model) -> Arc<dyn LanguageModel> {
 85        Arc::new(OpenCodeLanguageModel {
 86            id: LanguageModelId::from(model.id().to_string()),
 87            model,
 88            state: self.state.clone(),
 89            http_client: self.http_client.clone(),
 90            request_limiter: RateLimiter::new(4),
 91        })
 92    }
 93
 94    pub fn settings(cx: &App) -> &OpenCodeSettings {
 95        &crate::AllLanguageModelSettings::get_global(cx).opencode
 96    }
 97
 98    fn api_url(cx: &App) -> SharedString {
 99        let api_url = &Self::settings(cx).api_url;
100        if api_url.is_empty() {
101            OPENCODE_API_URL.into()
102        } else {
103            SharedString::new(api_url.as_str())
104        }
105    }
106}
107
108impl LanguageModelProviderState for OpenCodeLanguageModelProvider {
109    type ObservableEntity = State;
110
111    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
112        Some(self.state.clone())
113    }
114}
115
116impl LanguageModelProvider for OpenCodeLanguageModelProvider {
117    fn id(&self) -> LanguageModelProviderId {
118        PROVIDER_ID
119    }
120
121    fn name(&self) -> LanguageModelProviderName {
122        PROVIDER_NAME
123    }
124
125    fn icon(&self) -> IconOrSvg {
126        IconOrSvg::Icon(IconName::AiOpenCode)
127    }
128
129    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
130        Some(self.create_language_model(opencode::Model::default()))
131    }
132
133    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
134        Some(self.create_language_model(opencode::Model::default_fast()))
135    }
136
137    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
138        let mut models = BTreeMap::default();
139
140        for model in opencode::Model::iter() {
141            if !matches!(model, opencode::Model::Custom { .. }) {
142                models.insert(model.id().to_string(), model);
143            }
144        }
145
146        for model in &Self::settings(cx).available_models {
147            let protocol = match model.protocol.as_str() {
148                "anthropic" => ApiProtocol::Anthropic,
149                "openai_responses" => ApiProtocol::OpenAiResponses,
150                "openai_chat" => ApiProtocol::OpenAiChat,
151                "google" => ApiProtocol::Google,
152                _ => ApiProtocol::OpenAiChat, // default fallback
153            };
154            models.insert(
155                model.name.clone(),
156                opencode::Model::Custom {
157                    name: model.name.clone(),
158                    display_name: model.display_name.clone(),
159                    max_tokens: model.max_tokens,
160                    max_output_tokens: model.max_output_tokens,
161                    protocol,
162                },
163            );
164        }
165
166        models
167            .into_values()
168            .map(|model| self.create_language_model(model))
169            .collect()
170    }
171
172    fn is_authenticated(&self, cx: &App) -> bool {
173        self.state.read(cx).is_authenticated()
174    }
175
176    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
177        self.state.update(cx, |state, cx| state.authenticate(cx))
178    }
179
180    fn configuration_view(
181        &self,
182        _target_agent: language_model::ConfigurationViewTargetAgent,
183        window: &mut Window,
184        cx: &mut App,
185    ) -> AnyView {
186        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
187            .into()
188    }
189
190    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
191        self.state
192            .update(cx, |state, cx| state.set_api_key(None, cx))
193    }
194}
195
/// A single OpenCode Zen model instance; holds the shared auth state, the
/// HTTP client, and a per-model rate limiter (max 4 concurrent requests,
/// see `RateLimiter::new(4)` in `create_language_model`).
pub struct OpenCodeLanguageModel {
    id: LanguageModelId,
    model: opencode::Model,
    state: Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}
203
impl OpenCodeLanguageModel {
    /// Returns the base API URL (e.g., "https://opencode.ai/zen").
    fn base_api_url(&self, cx: &AsyncApp) -> SharedString {
        self.state
            .read_with(cx, |_, cx| OpenCodeLanguageModelProvider::api_url(cx))
    }

    /// Returns the API key currently associated with the effective API URL,
    /// or `None` if no key is configured.
    fn api_key(&self, cx: &AsyncApp) -> Option<Arc<str>> {
        self.state.read_with(cx, |state, cx| {
            let api_url = OpenCodeLanguageModelProvider::api_url(cx);
            state.api_key_state.key(&api_url)
        })
    }

    /// Streams a completion over the Anthropic wire protocol, rate-limited by
    /// `request_limiter`. Fails with `NoApiKey` when no key is configured.
    fn stream_anthropic(
        &self,
        request: anthropic::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<anthropic::Event, anthropic::AnthropicError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let http_client = self.http_client.clone();
        // Anthropic crate appends /v1/messages to api_url
        let api_url = self.base_api_url(cx);
        let api_key = self.api_key(cx);

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = anthropic::stream_completion(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
                None,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Streams a completion over the OpenAI chat-completions protocol,
    /// rate-limited by `request_limiter`.
    fn stream_openai_chat(
        &self,
        request: open_ai::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<open_ai::ResponseStreamEvent>>>,
    > {
        let http_client = self.http_client.clone();
        // OpenAI crate appends /chat/completions to api_url, so we pass base + "/v1"
        let base_url = self.base_api_url(cx);
        let api_url: SharedString = format!("{base_url}/v1").into();
        let api_key = self.api_key(cx);
        let provider_name = PROVIDER_NAME.0.to_string();

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = open_ai::stream_completion(
                http_client.as_ref(),
                &provider_name,
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Streams a completion over the OpenAI Responses protocol, rate-limited
    /// by `request_limiter`.
    fn stream_openai_response(
        &self,
        request: open_ai::responses::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<open_ai::responses::StreamEvent>>>,
    > {
        let http_client = self.http_client.clone();
        // Responses crate appends /responses to api_url, so we pass base + "/v1"
        let base_url = self.base_api_url(cx);
        let api_url: SharedString = format!("{base_url}/v1").into();
        let api_key = self.api_key(cx);
        let provider_name = PROVIDER_NAME.0.to_string();

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = open_ai::responses::stream_response(
                http_client.as_ref(),
                &provider_name,
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }

    /// Streams a completion over the Google protocol via the OpenCode
    /// Zen-specific endpoint (`opencode::stream_generate_content_zen` takes
    /// the base URL directly — no path suffix is added here).
    fn stream_google_zen(
        &self,
        request: google_ai::GenerateContentRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<futures::stream::BoxStream<'static, Result<google_ai::GenerateContentResponse>>>,
    > {
        let http_client = self.http_client.clone();
        let api_url = self.base_api_url(cx);
        let api_key = self.api_key(cx);

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request = opencode::stream_generate_content_zen(
                http_client.as_ref(),
                &api_url,
                &api_key,
                request,
            );
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}
358
impl LanguageModel for OpenCodeLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        self.model.supports_tools()
    }

    fn supports_images(&self) -> bool {
        self.model.supports_images()
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => true,
            LanguageModelToolChoice::None => {
                // Google models don't support None tool choice
                self.model.protocol() != ApiProtocol::Google
            }
        }
    }

    fn telemetry_id(&self) -> String {
        format!("opencode/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens()
    }

    /// Estimates the request's token count on a background thread.
    ///
    /// NOTE(review): counts are approximated with the "gpt-4o" tiktoken
    /// tokenizer for every model, regardless of protocol — the backend's
    /// actual tokenizer may differ, so treat this as an estimate only.
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        cx.background_spawn(async move {
            let messages = request
                .messages
                .into_iter()
                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(message.string_contents()),
                    name: None,
                    function_call: None,
                })
                .collect::<Vec<_>>();

            tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }

    /// Dispatches the request to the wire protocol declared by the model
    /// (`self.model.protocol()`), converting the generic request into the
    /// protocol-specific form and mapping the protocol's event stream back
    /// into `LanguageModelCompletionEvent`s.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        match self.model.protocol() {
            ApiProtocol::Anthropic => {
                // Temperature fixed at 1.0; output cap falls back to 8192
                // tokens when the model doesn't declare one.
                let anthropic_request = into_anthropic(
                    request,
                    self.model.id().to_string(),
                    1.0,
                    self.model.max_output_tokens().unwrap_or(8192),
                    anthropic::AnthropicModelMode::Default,
                );
                let stream = self.stream_anthropic(anthropic_request, cx);
                async move {
                    let mapper = AnthropicEventMapper::new();
                    Ok(mapper.map_stream(stream.await?).boxed())
                }
                .boxed()
            }
            ApiProtocol::OpenAiChat => {
                let openai_request = into_open_ai(
                    request,
                    self.model.id(),
                    false,
                    false,
                    self.model.max_output_tokens(),
                    None,
                );
                let stream = self.stream_openai_chat(openai_request, cx);
                async move {
                    let mapper = OpenAiEventMapper::new();
                    Ok(mapper.map_stream(stream.await?).boxed())
                }
                .boxed()
            }
            ApiProtocol::OpenAiResponses => {
                let response_request = into_open_ai_response(
                    request,
                    self.model.id(),
                    false,
                    false,
                    self.model.max_output_tokens(),
                    None,
                );
                let stream = self.stream_openai_response(response_request, cx);
                async move {
                    let mapper = OpenAiResponseEventMapper::new();
                    Ok(mapper.map_stream(stream.await?).boxed())
                }
                .boxed()
            }
            ApiProtocol::Google => {
                let google_request = into_google(
                    request,
                    self.model.id().to_string(),
                    google_ai::GoogleModelMode::Default,
                );
                let stream = self.stream_google_zen(google_request, cx);
                async move {
                    let mapper = GoogleEventMapper::new();
                    Ok(mapper.map_stream(stream.await?.boxed()).boxed())
                }
                .boxed()
            }
        }
    }
}
510
/// UI view for entering, saving, and resetting the OpenCode Zen API key.
struct ConfigurationView {
    api_key_editor: Entity<InputField>,
    state: Entity<State>,
    // `Some` while stored credentials are still being loaded; the render
    // method shows a loading placeholder until this becomes `None`.
    load_credentials_task: Option<Task<()>>,
}
516
517impl ConfigurationView {
518    fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
519        let api_key_editor = cx.new(|cx| {
520            InputField::new(window, cx, "sk-00000000000000000000000000000000").label("API key")
521        });
522
523        cx.observe(&state, |_, _, cx| {
524            cx.notify();
525        })
526        .detach();
527
528        let load_credentials_task = Some(cx.spawn_in(window, {
529            let state = state.clone();
530            async move |this, cx| {
531                if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
532                    let _ = task.await;
533                }
534                this.update(cx, |this, cx| {
535                    this.load_credentials_task = None;
536                    cx.notify();
537                })
538                .log_err();
539            }
540        }));
541
542        Self {
543            api_key_editor,
544            state,
545            load_credentials_task,
546        }
547    }
548
549    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
550        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
551        if api_key.is_empty() {
552            return;
553        }
554
555        self.api_key_editor
556            .update(cx, |editor, cx| editor.set_text("", window, cx));
557
558        let state = self.state.clone();
559        cx.spawn_in(window, async move |_, cx| {
560            state
561                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
562                .await
563        })
564        .detach_and_log_err(cx);
565    }
566
567    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
568        self.api_key_editor
569            .update(cx, |editor, cx| editor.set_text("", window, cx));
570
571        let state = self.state.clone();
572        cx.spawn_in(window, async move |_, cx| {
573            state
574                .update(cx, |state, cx| state.set_api_key(None, cx))
575                .await
576        })
577        .detach_and_log_err(cx);
578    }
579
580    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
581        !self.state.read(cx).is_authenticated()
582    }
583}
584
impl Render for ConfigurationView {
    /// Renders one of three states: a loading placeholder while credentials
    /// load, a key-entry form when unauthenticated, or a "configured" card
    /// (with reset affordance) when a key is present.
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();
        // Mention the custom URL in the label only when it differs from the
        // default, so the common case stays uncluttered.
        let configured_card_label = if env_var_set {
            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable")
        } else {
            let api_url = OpenCodeLanguageModelProvider::api_url(cx);
            if api_url == OPENCODE_API_URL {
                "API key configured".to_string()
            } else {
                format!("API key configured for {}", api_url)
            }
        };

        let api_key_section = if self.should_render_editor(cx) {
            // Unauthenticated: instructions, the key editor (saved via the
            // `menu::Confirm` action wired to `save_api_key`), and an env-var hint.
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new(
                    "To use OpenCode Zen models in Zed, you need an API key:",
                ))
                .child(
                    List::new()
                        .child(
                            ListBulletItem::new("")
                                .child(Label::new("Sign in and get your key at"))
                                .child(ButtonLink::new(
                                    "OpenCode Zen Console",
                                    "https://opencode.ai/zen",
                                )),
                        )
                        .child(ListBulletItem::new(
                            "Paste your API key below and hit enter to start using OpenCode Zen",
                        )),
                )
                .child(self.api_key_editor.clone())
                .child(
                    Label::new(format!(
                        "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed."
                    ))
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any_element()
        } else {
            // Authenticated: show the configured card; resetting is disabled
            // when the key comes from the environment variable, since we can't
            // unset that from here.
            ConfiguredApiCard::new(configured_card_label)
                .disabled(env_var_set)
                .when(env_var_set, |this| {
                    this.tooltip_label(format!(
                        "To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable."
                    ))
                })
                .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx)))
                .into_any_element()
        };

        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials...")).into_any()
        } else {
            v_flex().size_full().child(api_key_section).into_any()
        }
    }
}