1use anyhow::{Result, anyhow};
2use collections::HashMap;
3use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture};
4use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task};
5use http_client::HttpClient;
6use language_model::{
7 AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
8 LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
9 LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
10 LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolSchemaFormat,
11 LanguageModelToolUse, MessageContent, RateLimiter, Role, StopReason, TokenUsage,
12};
13use open_router::{
14 Model, ModelMode as OpenRouterModelMode, OPEN_ROUTER_API_URL, ResponseStreamEvent, list_models,
15};
16use settings::{OpenRouterAvailableModel as AvailableModel, Settings, SettingsStore};
17use std::pin::Pin;
18use std::str::FromStr as _;
19use std::sync::{Arc, LazyLock};
20use ui::{List, prelude::*};
21use ui_input::InputField;
22use util::ResultExt;
23use zed_env_vars::{EnvVar, env_var};
24
25use crate::ui::ConfiguredApiCard;
26use crate::{api_key::ApiKeyState, ui::InstructionListItem};
27
/// Stable provider identifier used for settings lookup and registration.
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openrouter");
/// Human-readable provider name shown in the UI and used in error messages.
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenRouter");

/// Environment variable consulted for an API key before stored credentials.
const API_KEY_ENV_VAR_NAME: &str = "OPENROUTER_API_KEY";
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);

/// User-configurable settings for the OpenRouter provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenRouterSettings {
    /// Custom API endpoint; an empty string means the default OpenRouter URL is used.
    pub api_url: String,
    /// Models declared in settings; these override same-named API-discovered models.
    pub available_models: Vec<AvailableModel>,
}
39
/// Language-model provider backed by the OpenRouter API.
pub struct OpenRouterLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    // Shared, observable state (credentials + fetched model list).
    state: Entity<State>,
}
44
/// Shared provider state: API-key storage plus the dynamically fetched model list.
pub struct State {
    // Per-URL API key storage (from env var or saved credentials).
    api_key_state: ApiKeyState,
    http_client: Arc<dyn HttpClient>,
    // Models discovered via `list_models` after authentication.
    available_models: Vec<open_router::Model>,
    // In-flight (or most recent) model-list fetch; replaced on re-authentication.
    fetch_models_task: Option<Task<Result<(), LanguageModelCompletionError>>>,
}
51
52impl State {
53 fn is_authenticated(&self) -> bool {
54 self.api_key_state.has_key()
55 }
56
57 fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
58 let api_url = OpenRouterLanguageModelProvider::api_url(cx);
59 self.api_key_state
60 .store(api_url, api_key, |this| &mut this.api_key_state, cx)
61 }
62
63 fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
64 let api_url = OpenRouterLanguageModelProvider::api_url(cx);
65 let task = self.api_key_state.load_if_needed(
66 api_url,
67 &API_KEY_ENV_VAR,
68 |this| &mut this.api_key_state,
69 cx,
70 );
71
72 cx.spawn(async move |this, cx| {
73 let result = task.await;
74 this.update(cx, |this, cx| this.restart_fetch_models_task(cx))
75 .ok();
76 result
77 })
78 }
79
80 fn fetch_models(
81 &mut self,
82 cx: &mut Context<Self>,
83 ) -> Task<Result<(), LanguageModelCompletionError>> {
84 let http_client = self.http_client.clone();
85 let api_url = OpenRouterLanguageModelProvider::api_url(cx);
86 let Some(api_key) = self.api_key_state.key(&api_url) else {
87 return Task::ready(Err(LanguageModelCompletionError::NoApiKey {
88 provider: PROVIDER_NAME,
89 }));
90 };
91 cx.spawn(async move |this, cx| {
92 let models = list_models(http_client.as_ref(), &api_url, &api_key)
93 .await
94 .map_err(|e| {
95 LanguageModelCompletionError::Other(anyhow::anyhow!(
96 "OpenRouter error: {:?}",
97 e
98 ))
99 })?;
100
101 this.update(cx, |this, cx| {
102 this.available_models = models;
103 cx.notify();
104 })
105 .map_err(|e| LanguageModelCompletionError::Other(e))?;
106
107 Ok(())
108 })
109 }
110
111 fn restart_fetch_models_task(&mut self, cx: &mut Context<Self>) {
112 if self.is_authenticated() {
113 let task = self.fetch_models(cx);
114 self.fetch_models_task.replace(task);
115 } else {
116 self.available_models = Vec::new();
117 }
118 }
119}
120
impl OpenRouterLanguageModelProvider {
    /// Creates the provider and its shared `State`, re-authenticating whenever
    /// the OpenRouter section of the global settings changes.
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        let state = cx.new(|cx| {
            cx.observe_global::<SettingsStore>({
                // Snapshot the settings so we only react to real changes.
                let mut last_settings = OpenRouterLanguageModelProvider::settings(cx).clone();
                move |this: &mut State, cx| {
                    let current_settings = OpenRouterLanguageModelProvider::settings(cx);
                    let settings_changed = current_settings != &last_settings;
                    if settings_changed {
                        last_settings = current_settings.clone();
                        // Settings (e.g. the API URL) changed: reload the key
                        // and refetch models for the new configuration.
                        this.authenticate(cx).detach();
                        cx.notify();
                    }
                }
            })
            .detach();
            State {
                api_key_state: ApiKeyState::new(Self::api_url(cx)),
                http_client: http_client.clone(),
                available_models: Vec::new(),
                fetch_models_task: None,
            }
        });

        Self { http_client, state }
    }

    /// OpenRouter section of the global language-model settings.
    fn settings(cx: &App) -> &OpenRouterSettings {
        &crate::AllLanguageModelSettings::get_global(cx).open_router
    }

    /// Effective API URL: the configured one, or the OpenRouter default when unset.
    fn api_url(cx: &App) -> SharedString {
        let api_url = &Self::settings(cx).api_url;
        if api_url.is_empty() {
            OPEN_ROUTER_API_URL.into()
        } else {
            SharedString::new(api_url.as_str())
        }
    }

    /// Wraps an OpenRouter model description in a `LanguageModel` handle.
    fn create_language_model(&self, model: open_router::Model) -> Arc<dyn LanguageModel> {
        Arc::new(OpenRouterLanguageModel {
            id: LanguageModelId::from(model.id().to_string()),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            // Cap concurrent requests to OpenRouter at 4.
            request_limiter: RateLimiter::new(4),
        })
    }
}
171
172impl LanguageModelProviderState for OpenRouterLanguageModelProvider {
173 type ObservableEntity = State;
174
175 fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
176 Some(self.state.clone())
177 }
178}
179
180impl LanguageModelProvider for OpenRouterLanguageModelProvider {
181 fn id(&self) -> LanguageModelProviderId {
182 PROVIDER_ID
183 }
184
185 fn name(&self) -> LanguageModelProviderName {
186 PROVIDER_NAME
187 }
188
189 fn icon(&self) -> IconName {
190 IconName::AiOpenRouter
191 }
192
193 fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
194 Some(self.create_language_model(open_router::Model::default()))
195 }
196
197 fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
198 Some(self.create_language_model(open_router::Model::default_fast()))
199 }
200
201 fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
202 let mut models_from_api = self.state.read(cx).available_models.clone();
203 let mut settings_models = Vec::new();
204
205 for model in &Self::settings(cx).available_models {
206 settings_models.push(open_router::Model {
207 name: model.name.clone(),
208 display_name: model.display_name.clone(),
209 max_tokens: model.max_tokens,
210 supports_tools: model.supports_tools,
211 supports_images: model.supports_images,
212 mode: model.mode.unwrap_or_default(),
213 provider: model.provider.clone(),
214 });
215 }
216
217 for settings_model in &settings_models {
218 if let Some(pos) = models_from_api
219 .iter()
220 .position(|m| m.name == settings_model.name)
221 {
222 models_from_api[pos] = settings_model.clone();
223 } else {
224 models_from_api.push(settings_model.clone());
225 }
226 }
227
228 models_from_api
229 .into_iter()
230 .map(|model| self.create_language_model(model))
231 .collect()
232 }
233
234 fn is_authenticated(&self, cx: &App) -> bool {
235 self.state.read(cx).is_authenticated()
236 }
237
238 fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
239 self.state.update(cx, |state, cx| state.authenticate(cx))
240 }
241
242 fn configuration_view(
243 &self,
244 _target_agent: language_model::ConfigurationViewTargetAgent,
245 window: &mut Window,
246 cx: &mut App,
247 ) -> AnyView {
248 cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
249 .into()
250 }
251
252 fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
253 self.state
254 .update(cx, |state, cx| state.set_api_key(None, cx))
255 }
256}
257
/// A single OpenRouter-hosted model exposed through the `LanguageModel` trait.
pub struct OpenRouterLanguageModel {
    id: LanguageModelId,
    model: open_router::Model,
    // Shared provider state, read for the API key/URL on each request.
    state: Entity<State>,
    http_client: Arc<dyn HttpClient>,
    // Limits concurrent in-flight requests (see `create_language_model`).
    request_limiter: RateLimiter,
}
265
impl OpenRouterLanguageModel {
    /// Starts a streaming chat completion against the OpenRouter API.
    ///
    /// The API key and URL are read from the shared `State` synchronously,
    /// before the returned future runs; the future resolves to the raw event
    /// stream, or errors when no key is configured or the app state is gone.
    fn stream_completion(
        &self,
        request: open_router::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            futures::stream::BoxStream<
                'static,
                Result<ResponseStreamEvent, open_router::OpenRouterError>,
            >,
            LanguageModelCompletionError,
        >,
    > {
        let http_client = self.http_client.clone();
        let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
            let api_url = OpenRouterLanguageModelProvider::api_url(cx);
            (state.api_key_state.key(&api_url), api_url)
        }) else {
            // The app (and with it our state entity) has been torn down.
            return future::ready(Err(anyhow!("App state dropped").into())).boxed();
        };

        async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request =
                open_router::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            request.await.map_err(Into::into)
        }
        .boxed()
    }
}
302
303impl LanguageModel for OpenRouterLanguageModel {
304 fn id(&self) -> LanguageModelId {
305 self.id.clone()
306 }
307
308 fn name(&self) -> LanguageModelName {
309 LanguageModelName::from(self.model.display_name().to_string())
310 }
311
312 fn provider_id(&self) -> LanguageModelProviderId {
313 PROVIDER_ID
314 }
315
316 fn provider_name(&self) -> LanguageModelProviderName {
317 PROVIDER_NAME
318 }
319
320 fn supports_tools(&self) -> bool {
321 self.model.supports_tool_calls()
322 }
323
324 fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
325 let model_id = self.model.id().trim().to_lowercase();
326 if model_id.contains("gemini") || model_id.contains("grok") {
327 LanguageModelToolSchemaFormat::JsonSchemaSubset
328 } else {
329 LanguageModelToolSchemaFormat::JsonSchema
330 }
331 }
332
333 fn telemetry_id(&self) -> String {
334 format!("openrouter/{}", self.model.id())
335 }
336
337 fn max_token_count(&self) -> u64 {
338 self.model.max_token_count()
339 }
340
341 fn max_output_tokens(&self) -> Option<u64> {
342 self.model.max_output_tokens()
343 }
344
345 fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
346 match choice {
347 LanguageModelToolChoice::Auto => true,
348 LanguageModelToolChoice::Any => true,
349 LanguageModelToolChoice::None => true,
350 }
351 }
352
353 fn supports_images(&self) -> bool {
354 self.model.supports_images.unwrap_or(false)
355 }
356
357 fn count_tokens(
358 &self,
359 request: LanguageModelRequest,
360 cx: &App,
361 ) -> BoxFuture<'static, Result<u64>> {
362 count_open_router_tokens(request, self.model.clone(), cx)
363 }
364
365 fn stream_completion(
366 &self,
367 request: LanguageModelRequest,
368 cx: &AsyncApp,
369 ) -> BoxFuture<
370 'static,
371 Result<
372 futures::stream::BoxStream<
373 'static,
374 Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
375 >,
376 LanguageModelCompletionError,
377 >,
378 > {
379 let request = into_open_router(request, &self.model, self.max_output_tokens());
380 let request = self.stream_completion(request, cx);
381 let future = self.request_limiter.stream(async move {
382 let response = request.await?;
383 Ok(OpenRouterEventMapper::new().map_stream(response))
384 });
385 async move { Ok(future.await?.boxed()) }.boxed()
386 }
387}
388
/// Converts Zed's provider-agnostic `LanguageModelRequest` into an OpenRouter
/// chat-completion request for `model`.
///
/// Adjacent content parts of the same role are coalesced into one message, and
/// tool calls are attached to the most recent assistant message when possible.
pub fn into_open_router(
    request: LanguageModelRequest,
    model: &Model,
    max_output_tokens: Option<u64>,
) -> open_router::Request {
    let mut messages = Vec::new();
    for message in request.messages {
        let reasoning_details = message.reasoning_details.clone();
        for content in message.content {
            match content {
                MessageContent::Text(text) => add_message_content_part(
                    open_router::MessagePart::Text { text },
                    message.role,
                    &mut messages,
                ),
                // Thinking content is not replayed to the API; reasoning is
                // carried via `reasoning_details` on assistant messages instead.
                MessageContent::Thinking { .. } => {}
                MessageContent::RedactedThinking(_) => {}
                MessageContent::Image(image) => {
                    add_message_content_part(
                        open_router::MessagePart::Image {
                            image_url: image.to_base64_url(),
                        },
                        message.role,
                        &mut messages,
                    );
                }
                MessageContent::ToolUse(tool_use) => {
                    let tool_call = open_router::ToolCall {
                        id: tool_use.id.to_string(),
                        content: open_router::ToolCallContent::Function {
                            function: open_router::FunctionContent {
                                name: tool_use.name.to_string(),
                                arguments: serde_json::to_string(&tool_use.input)
                                    .unwrap_or_default(),
                                thought_signature: tool_use.thought_signature.clone(),
                            },
                        },
                    };

                    // Append to the trailing assistant message when there is
                    // one; only set its reasoning_details if not already set.
                    if let Some(open_router::RequestMessage::Assistant {
                        tool_calls,
                        reasoning_details: existing_reasoning,
                        ..
                    }) = messages.last_mut()
                    {
                        tool_calls.push(tool_call);
                        if existing_reasoning.is_none() && reasoning_details.is_some() {
                            *existing_reasoning = reasoning_details.clone();
                        }
                    } else {
                        messages.push(open_router::RequestMessage::Assistant {
                            content: None,
                            tool_calls: vec![tool_call],
                            reasoning_details: reasoning_details.clone(),
                        });
                    }
                }
                MessageContent::ToolResult(tool_result) => {
                    let content = match &tool_result.content {
                        LanguageModelToolResultContent::Text(text) => {
                            vec![open_router::MessagePart::Text {
                                text: text.to_string(),
                            }]
                        }
                        LanguageModelToolResultContent::Image(image) => {
                            vec![open_router::MessagePart::Image {
                                image_url: image.to_base64_url(),
                            }]
                        }
                    };

                    messages.push(open_router::RequestMessage::Tool {
                        content: content.into(),
                        tool_call_id: tool_result.tool_use_id.to_string(),
                    });
                }
            }
        }
    }

    open_router::Request {
        model: model.id().into(),
        messages,
        stream: true,
        stop: request.stop,
        // Default sampling temperature when the request doesn't specify one.
        temperature: request.temperature.unwrap_or(0.4),
        max_tokens: max_output_tokens,
        // When the model supports parallel tool calls and tools are present,
        // explicitly disable them; otherwise omit the field entirely.
        parallel_tool_calls: if model.supports_parallel_tool_calls() && !request.tools.is_empty() {
            Some(false)
        } else {
            None
        },
        usage: open_router::RequestUsage { include: true },
        // Only request reasoning for thinking-mode models when allowed.
        reasoning: if request.thinking_allowed
            && let OpenRouterModelMode::Thinking { budget_tokens } = model.mode
        {
            Some(open_router::Reasoning {
                effort: None,
                max_tokens: budget_tokens,
                exclude: Some(false),
                enabled: Some(true),
            })
        } else {
            None
        },
        tools: request
            .tools
            .into_iter()
            .map(|tool| open_router::ToolDefinition::Function {
                function: open_router::FunctionDefinition {
                    name: tool.name,
                    description: Some(tool.description),
                    parameters: Some(tool.input_schema),
                },
            })
            .collect(),
        tool_choice: request.tool_choice.map(|choice| match choice {
            LanguageModelToolChoice::Auto => open_router::ToolChoice::Auto,
            LanguageModelToolChoice::Any => open_router::ToolChoice::Required,
            LanguageModelToolChoice::None => open_router::ToolChoice::None,
        }),
        provider: model.provider.clone(),
    }
}
513
514fn add_message_content_part(
515 new_part: open_router::MessagePart,
516 role: Role,
517 messages: &mut Vec<open_router::RequestMessage>,
518) {
519 match (role, messages.last_mut()) {
520 (Role::User, Some(open_router::RequestMessage::User { content }))
521 | (Role::System, Some(open_router::RequestMessage::System { content })) => {
522 content.push_part(new_part);
523 }
524 (
525 Role::Assistant,
526 Some(open_router::RequestMessage::Assistant {
527 content: Some(content),
528 ..
529 }),
530 ) => {
531 content.push_part(new_part);
532 }
533 _ => {
534 messages.push(match role {
535 Role::User => open_router::RequestMessage::User {
536 content: open_router::MessageContent::from(vec![new_part]),
537 },
538 Role::Assistant => open_router::RequestMessage::Assistant {
539 content: Some(open_router::MessageContent::from(vec![new_part])),
540 tool_calls: Vec::new(),
541 reasoning_details: None,
542 },
543 Role::System => open_router::RequestMessage::System {
544 content: open_router::MessageContent::from(vec![new_part]),
545 },
546 });
547 }
548 }
549}
550
/// Accumulates streaming OpenRouter deltas into complete completion events.
pub struct OpenRouterEventMapper {
    // Partial tool calls keyed by their stream index, merged across deltas.
    tool_calls_by_index: HashMap<usize, RawToolCall>,
    // Last reasoning_details payload seen (also emitted immediately on receipt).
    reasoning_details: Option<serde_json::Value>,
}
555
impl OpenRouterEventMapper {
    /// Creates an empty mapper with no buffered tool calls or reasoning.
    pub fn new() -> Self {
        Self {
            tool_calls_by_index: HashMap::default(),
            reasoning_details: None,
        }
    }

    /// Adapts the raw OpenRouter response stream into completion events,
    /// flattening each network event into zero or more mapped events.
    pub fn map_stream(
        mut self,
        events: Pin<
            Box<
                dyn Send + Stream<Item = Result<ResponseStreamEvent, open_router::OpenRouterError>>,
            >,
        >,
    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
    {
        events.flat_map(move |event| {
            futures::stream::iter(match event {
                Ok(event) => self.map_event(event),
                Err(error) => vec![Err(error.into())],
            })
        })
    }

    /// Maps one streaming event into completion events: reasoning details,
    /// thinking text, content text, tool-call accumulation, usage, and stop.
    ///
    /// NOTE(review): only the first choice is inspected; an event with no
    /// choices yields an error (including any usage-only events, if
    /// OpenRouter sends those) — confirm against the API's streaming shape.
    pub fn map_event(
        &mut self,
        event: ResponseStreamEvent,
    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
        let Some(choice) = event.choices.first() else {
            return vec![Err(LanguageModelCompletionError::from(anyhow!(
                "Response contained no choices"
            )))];
        };

        let mut events = Vec::new();

        if let Some(details) = choice.delta.reasoning_details.clone() {
            // Emit reasoning_details immediately
            events.push(Ok(LanguageModelCompletionEvent::ReasoningDetails(
                details.clone(),
            )));
            self.reasoning_details = Some(details);
        }

        if let Some(reasoning) = choice.delta.reasoning.clone() {
            events.push(Ok(LanguageModelCompletionEvent::Thinking {
                text: reasoning,
                signature: None,
            }));
        }

        if let Some(content) = choice.delta.content.clone() {
            // OpenRouter send empty content string with the reasoning content
            // This is a workaround for the OpenRouter API bug
            if !content.is_empty() {
                events.push(Ok(LanguageModelCompletionEvent::Text(content)));
            }
        }

        if let Some(tool_calls) = choice.delta.tool_calls.as_ref() {
            // Merge partial tool-call chunks into the entry for their index;
            // each field may arrive across several deltas.
            for tool_call in tool_calls {
                let entry = self.tool_calls_by_index.entry(tool_call.index).or_default();

                if let Some(tool_id) = tool_call.id.clone() {
                    entry.id = tool_id;
                }

                if let Some(function) = tool_call.function.as_ref() {
                    if let Some(name) = function.name.clone() {
                        entry.name = name;
                    }

                    if let Some(arguments) = function.arguments.clone() {
                        // Arguments stream as JSON fragments; concatenate them.
                        entry.arguments.push_str(&arguments);
                    }

                    if let Some(signature) = function.thought_signature.clone() {
                        entry.thought_signature = Some(signature);
                    }
                }
            }
        }

        if let Some(usage) = event.usage {
            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
                input_tokens: usage.prompt_tokens,
                output_tokens: usage.completion_tokens,
                cache_creation_input_tokens: 0,
                cache_read_input_tokens: 0,
            })));
        }

        match choice.finish_reason.as_deref() {
            Some("stop") => {
                // Don't emit reasoning_details here - already emitted immediately when captured
                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
            }
            Some("tool_calls") => {
                // Flush all accumulated tool calls. NOTE(review): HashMap drain
                // order is unspecified, so multiple tool calls may be emitted
                // in arbitrary order — confirm callers don't rely on ordering.
                events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
                    match serde_json::Value::from_str(&tool_call.arguments) {
                        Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
                            LanguageModelToolUse {
                                id: tool_call.id.clone().into(),
                                name: tool_call.name.as_str().into(),
                                is_input_complete: true,
                                input,
                                raw_input: tool_call.arguments.clone(),
                                thought_signature: tool_call.thought_signature.clone(),
                            },
                        )),
                        Err(error) => Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
                            id: tool_call.id.clone().into(),
                            tool_name: tool_call.name.as_str().into(),
                            raw_input: tool_call.arguments.clone().into(),
                            json_parse_error: error.to_string(),
                        }),
                    }
                }));

                // Don't emit reasoning_details here - already emitted immediately when captured
                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
            }
            Some(stop_reason) => {
                log::error!("Unexpected OpenRouter stop_reason: {stop_reason:?}",);
                // Don't emit reasoning_details here - already emitted immediately when captured
                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
            }
            None => {}
        }

        events
    }
}
690
/// A tool call assembled incrementally from streaming deltas.
#[derive(Default)]
struct RawToolCall {
    id: String,
    name: String,
    // JSON arguments, concatenated from streamed fragments.
    arguments: String,
    thought_signature: Option<String>,
}
698
699pub fn count_open_router_tokens(
700 request: LanguageModelRequest,
701 _model: open_router::Model,
702 cx: &App,
703) -> BoxFuture<'static, Result<u64>> {
704 cx.background_spawn(async move {
705 let messages = request
706 .messages
707 .into_iter()
708 .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
709 role: match message.role {
710 Role::User => "user".into(),
711 Role::Assistant => "assistant".into(),
712 Role::System => "system".into(),
713 },
714 content: Some(message.string_contents()),
715 name: None,
716 function_call: None,
717 })
718 .collect::<Vec<_>>();
719
720 tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages).map(|tokens| tokens as u64)
721 })
722 .boxed()
723}
724
/// Settings-panel view for entering and resetting the OpenRouter API key.
struct ConfigurationView {
    api_key_editor: Entity<InputField>,
    state: Entity<State>,
    // Some while credentials are being loaded; None once loading finishes.
    load_credentials_task: Option<Task<()>>,
}
730
impl ConfigurationView {
    /// Builds the view, kicks off credential loading, and re-renders whenever
    /// the shared provider state changes.
    fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor = cx.new(|cx| {
            InputField::new(
                window,
                cx,
                "sk_or_000000000000000000000000000000000000000000000000",
            )
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn_in(window, {
            let state = state.clone();
            async move |this, cx| {
                // Authentication errors are surfaced elsewhere; here we only
                // care that loading finished so the spinner can be dismissed.
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    let _ = task.await;
                }

                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    /// Saves the key typed into the editor; no-op when the field is empty.
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
        if api_key.is_empty() {
            return;
        }

        // Clear the editor so a stale key isn't shown if the input is
        // displayed again (e.g. after an API URL change).
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    /// Clears the editor and removes the stored API key.
    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(None, cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    /// The key-entry editor is shown only while unauthenticated.
    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}
807
impl Render for ConfigurationView {
    /// Renders one of three states: loading, the key-entry instructions +
    /// editor, or the "configured" card with a reset action.
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();
        let configured_card_label = if env_var_set {
            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable")
        } else {
            // Mention the URL only when a non-default endpoint is configured.
            let api_url = OpenRouterLanguageModelProvider::api_url(cx);
            if api_url == OPEN_ROUTER_API_URL {
                "API key configured".to_string()
            } else {
                format!("API key configured for {}", api_url)
            }
        };

        if self.load_credentials_task.is_some() {
            div()
                .child(Label::new("Loading credentials..."))
                .into_any_element()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's agent with OpenRouter, you need to add an API key. Follow these steps:"))
                .child(
                    List::new()
                        .child(InstructionListItem::new(
                            "Create an API key by visiting",
                            Some("OpenRouter's console"),
                            Some("https://openrouter.ai/keys"),
                        ))
                        .child(InstructionListItem::text_only(
                            "Ensure your OpenRouter account has credits",
                        ))
                        .child(InstructionListItem::text_only(
                            "Paste your API key below and hit enter to start using the assistant",
                        )),
                )
                .child(self.api_key_editor.clone())
                .child(
                    Label::new(
                        format!("You can also assign the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed."),
                    )
                    .size(LabelSize::Small).color(Color::Muted),
                )
                .into_any_element()
        } else {
            // Keys supplied via the env var can't be reset from the UI.
            ConfiguredApiCard::new(configured_card_label)
                .disabled(env_var_set)
                .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx)))
                .when(env_var_set, |this| {
                    this.tooltip_label(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable."))
                })
                .into_any_element()
        }
    }
}
864
#[cfg(test)]
mod tests {
    use super::*;

    use open_router::{ChoiceDelta, FunctionChunk, ResponseMessageDelta, ToolCallChunk};

    #[gpui::test]
    async fn test_reasoning_details_preservation_with_tool_calls() {
        // This test verifies that reasoning_details are properly captured and preserved
        // when a model uses tool calling with reasoning/thinking tokens.
        //
        // The key regression this prevents:
        // - OpenRouter sends multiple reasoning_details updates during streaming
        // - First with actual content (encrypted reasoning data)
        // - Then with empty array on completion
        // - We must NOT overwrite the real data with the empty array

        let mut mapper = OpenRouterEventMapper::new();

        // Simulate the streaming events as they come from OpenRouter/Gemini
        let events = vec![
            // Event 1: Initial reasoning details with text
            ResponseStreamEvent {
                id: Some("response_123".into()),
                created: 1234567890,
                model: "google/gemini-3-pro-preview".into(),
                choices: vec![ChoiceDelta {
                    index: 0,
                    delta: ResponseMessageDelta {
                        role: None,
                        content: None,
                        reasoning: None,
                        tool_calls: None,
                        reasoning_details: Some(serde_json::json!([
                            {
                                "type": "reasoning.text",
                                "text": "Let me analyze this request...",
                                "format": "google-gemini-v1",
                                "index": 0
                            }
                        ])),
                    },
                    finish_reason: None,
                }],
                usage: None,
            },
            // Event 2: More reasoning details
            ResponseStreamEvent {
                id: Some("response_123".into()),
                created: 1234567890,
                model: "google/gemini-3-pro-preview".into(),
                choices: vec![ChoiceDelta {
                    index: 0,
                    delta: ResponseMessageDelta {
                        role: None,
                        content: None,
                        reasoning: None,
                        tool_calls: None,
                        reasoning_details: Some(serde_json::json!([
                            {
                                "type": "reasoning.encrypted",
                                "data": "EtgDCtUDAdHtim9OF5jm4aeZSBAtl/randomized123",
                                "format": "google-gemini-v1",
                                "index": 0,
                                "id": "tool_call_abc123"
                            }
                        ])),
                    },
                    finish_reason: None,
                }],
                usage: None,
            },
            // Event 3: Tool call starts
            ResponseStreamEvent {
                id: Some("response_123".into()),
                created: 1234567890,
                model: "google/gemini-3-pro-preview".into(),
                choices: vec![ChoiceDelta {
                    index: 0,
                    delta: ResponseMessageDelta {
                        role: None,
                        content: None,
                        reasoning: None,
                        tool_calls: Some(vec![ToolCallChunk {
                            index: 0,
                            id: Some("tool_call_abc123".into()),
                            function: Some(FunctionChunk {
                                name: Some("list_directory".into()),
                                arguments: Some("{\"path\":\"test\"}".into()),
                                thought_signature: Some("sha256:test_signature_xyz789".into()),
                            }),
                        }]),
                        reasoning_details: None,
                    },
                    finish_reason: None,
                }],
                usage: None,
            },
            // Event 4: Empty reasoning_details on tool_calls finish
            // This is the critical event - we must not overwrite with this empty array!
            ResponseStreamEvent {
                id: Some("response_123".into()),
                created: 1234567890,
                model: "google/gemini-3-pro-preview".into(),
                choices: vec![ChoiceDelta {
                    index: 0,
                    delta: ResponseMessageDelta {
                        role: None,
                        content: None,
                        reasoning: None,
                        tool_calls: None,
                        reasoning_details: Some(serde_json::json!([])),
                    },
                    finish_reason: Some("tool_calls".into()),
                }],
                usage: None,
            },
        ];

        // Process all events
        let mut collected_events = Vec::new();
        for event in events {
            let mapped = mapper.map_event(event);
            collected_events.extend(mapped);
        }

        // Verify we got the expected events
        let mut has_tool_use = false;
        let mut reasoning_details_events = Vec::new();
        let mut thought_signature_value = None;

        for event_result in collected_events {
            match event_result {
                Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) => {
                    has_tool_use = true;
                    assert_eq!(tool_use.id.to_string(), "tool_call_abc123");
                    assert_eq!(tool_use.name.as_ref(), "list_directory");
                    thought_signature_value = tool_use.thought_signature.clone();
                }
                Ok(LanguageModelCompletionEvent::ReasoningDetails(details)) => {
                    reasoning_details_events.push(details);
                }
                _ => {}
            }
        }

        // Assertions
        assert!(has_tool_use, "Should have emitted ToolUse event");
        assert!(
            !reasoning_details_events.is_empty(),
            "Should have emitted ReasoningDetails events"
        );

        // We should have received multiple reasoning_details events (text, encrypted, empty)
        // The agent layer is responsible for keeping only the first non-empty one
        assert!(
            reasoning_details_events.len() >= 2,
            "Should have multiple reasoning_details events from streaming"
        );

        // Verify at least one contains the encrypted data
        let has_encrypted = reasoning_details_events.iter().any(|details| {
            if let serde_json::Value::Array(arr) = details {
                arr.iter().any(|item| {
                    item["type"] == "reasoning.encrypted"
                        && item["data"]
                            .as_str()
                            .map_or(false, |s| s.contains("EtgDCtUDAdHtim9OF5jm4aeZSBAtl"))
                })
            } else {
                false
            }
        });
        assert!(
            has_encrypted,
            "Should have at least one reasoning_details with encrypted data"
        );

        // Verify thought_signature was captured
        assert!(
            thought_signature_value.is_some(),
            "Tool use should have thought_signature"
        );
        assert_eq!(
            thought_signature_value.unwrap(),
            "sha256:test_signature_xyz789"
        );
    }

    // NOTE(review): this test exercises the agent's dedup policy inline (a
    // local variable) rather than calling production agent code; it documents
    // the intended contract but would not catch a regression in the agent.
    #[gpui::test]
    async fn test_agent_prevents_empty_reasoning_details_overwrite() {
        // This test verifies that the agent layer prevents empty reasoning_details
        // from overwriting non-empty ones, even though the mapper emits all events.

        // Simulate what the agent does when it receives multiple ReasoningDetails events
        let mut agent_reasoning_details: Option<serde_json::Value> = None;

        let events = vec![
            // First event: non-empty reasoning_details
            serde_json::json!([
                {
                    "type": "reasoning.encrypted",
                    "data": "real_data_here",
                    "format": "google-gemini-v1"
                }
            ]),
            // Second event: empty array (should not overwrite)
            serde_json::json!([]),
        ];

        for details in events {
            // This mimics the agent's logic: only store if we don't already have it
            if agent_reasoning_details.is_none() {
                agent_reasoning_details = Some(details);
            }
        }

        // Verify the agent kept the first non-empty reasoning_details
        assert!(agent_reasoning_details.is_some());
        let final_details = agent_reasoning_details.unwrap();
        if let serde_json::Value::Array(arr) = &final_details {
            assert!(
                !arr.is_empty(),
                "Agent should have kept the non-empty reasoning_details"
            );
            assert_eq!(arr[0]["data"], "real_data_here");
        } else {
            panic!("Expected array");
        }
    }
}