From c44d93745a3c444699bfe4912f7fbd194039bcdb Mon Sep 17 00:00:00 2001
From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com>
Date: Tue, 18 Nov 2025 12:28:14 -0300
Subject: [PATCH] agent_ui: Improve the modal to add LLM providers (#42983)

Closes https://github.com/zed-industries/zed/issues/42807

This PR makes the modal to add LLM providers a bit better to interact with:

1. Added a scrollbar
2. Made the inputs navigable with Tab
3. Added some responsiveness to ensure it resizes on shorter windows

https://github.com/user-attachments/assets/758ea5f0-6bcc-4a2b-87ea-114982f37caf

Release Notes:

- agent: Improved the modal to add LLM providers by making it responsive and keyboard navigable.
---
 .../add_llm_provider_modal.rs     | 144 +++++++++++++-----
 crates/ui/src/components/modal.rs |   2 +-
 2 files changed, 103 insertions(+), 43 deletions(-)

diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs
index f41b59132d8c0dfe5ed39fe84775e61b8131cc0a..3427dab0d22c6900a3078f1dcb4cc7e892cce7db 100644
--- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs
+++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs
@@ -3,16 +3,42 @@ use std::sync::Arc;
 use anyhow::Result;
 use collections::HashSet;
 use fs::Fs;
-use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, Task};
+use gpui::{
+    DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, ScrollHandle, Task,
+};
 use language_model::LanguageModelRegistry;
 use language_models::provider::open_ai_compatible::{AvailableModel, ModelCapabilities};
 use settings::{OpenAiCompatibleSettingsContent, update_settings_file};
 use ui::{
-    Banner, Checkbox, KeyBinding, Modal, ModalFooter, ModalHeader, Section, ToggleState, prelude::*,
+    Banner, Checkbox, KeyBinding, Modal, ModalFooter, ModalHeader, Section, ToggleState,
+    WithScrollbar, prelude::*,
 };
 use ui_input::InputField;
 use workspace::{ModalView, Workspace};
 
+fn single_line_input(
+    label: impl Into<SharedString>,
+    placeholder: impl Into<SharedString>,
+    text: Option<&str>,
+    tab_index: isize,
+    window: &mut Window,
+    cx: &mut App,
+) -> Entity<InputField> {
+    cx.new(|cx| {
+        let input = InputField::new(window, cx, placeholder)
+            .label(label)
+            .tab_index(tab_index)
+            .tab_stop(true);
+
+        if let Some(text) = text {
+            input
+                .editor()
+                .update(cx, |editor, cx| editor.set_text(text, window, cx));
+        }
+        input
+    })
+}
+
 #[derive(Clone, Copy)]
 pub enum LlmCompatibleProvider {
     OpenAi,
@@ -41,12 +67,14 @@ struct AddLlmProviderInput {
 
 impl AddLlmProviderInput {
     fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut App) -> Self {
-        let provider_name = single_line_input("Provider Name", provider.name(), None, window, cx);
-        let api_url = single_line_input("API URL", provider.api_url(), None, window, cx);
+        let provider_name =
+            single_line_input("Provider Name", provider.name(), None, 1, window, cx);
+        let api_url = single_line_input("API URL", provider.api_url(), None, 2, window, cx);
         let api_key = single_line_input(
             "API Key",
             "000000000000000000000000000000000000000000000000",
             None,
+            3,
             window,
             cx,
         );
@@ -55,12 +83,13 @@ impl AddLlmProviderInput {
             provider_name,
             api_url,
             api_key,
-            models: vec![ModelInput::new(window, cx)],
+            models: vec![ModelInput::new(0, window, cx)],
         }
     }
 
     fn add_model(&mut self, window: &mut Window, cx: &mut App) {
-        self.models.push(ModelInput::new(window, cx));
+        let model_index = self.models.len();
+        self.models.push(ModelInput::new(model_index, window, cx));
     }
 
     fn remove_model(&mut self, index: usize) {
@@ -84,11 +113,14 @@ struct ModelInput {
 }
 
 impl ModelInput {
-    fn new(window: &mut Window, cx: &mut App) -> Self {
+    fn new(model_index: usize, window: &mut Window, cx: &mut App) -> Self {
+        let base_tab_index = (3 + (model_index * 4)) as isize;
+
         let model_name = single_line_input(
             "Model Name",
             "e.g. gpt-4o, claude-opus-4, gemini-2.5-pro",
             None,
+            base_tab_index + 1,
             window,
             cx,
         );
@@ -96,6 +128,7 @@ impl ModelInput {
             "Max Completion Tokens",
             "200000",
             Some("200000"),
+            base_tab_index + 2,
             window,
             cx,
         );
@@ -103,16 +136,26 @@ impl ModelInput {
             "Max Output Tokens",
             "Max Output Tokens",
             Some("32000"),
+            base_tab_index + 3,
             window,
             cx,
         );
-        let max_tokens = single_line_input("Max Tokens", "Max Tokens", Some("200000"), window, cx);
+        let max_tokens = single_line_input(
+            "Max Tokens",
+            "Max Tokens",
+            Some("200000"),
+            base_tab_index + 4,
+            window,
+            cx,
+        );
+
         let ModelCapabilities {
             tools,
             images,
             parallel_tool_calls,
             prompt_cache_key,
         } = ModelCapabilities::default();
+
         Self {
             name: model_name,
             max_completion_tokens,
@@ -165,24 +208,6 @@ impl ModelInput {
     }
 }
 
-fn single_line_input(
-    label: impl Into<SharedString>,
-    placeholder: impl Into<SharedString>,
-    text: Option<&str>,
-    window: &mut Window,
-    cx: &mut App,
-) -> Entity<InputField> {
-    cx.new(|cx| {
-        let input = InputField::new(window, cx, placeholder).label(label);
-        if let Some(text) = text {
-            input
-                .editor()
-                .update(cx, |editor, cx| editor.set_text(text, window, cx));
-        }
-        input
-    })
-}
-
 fn save_provider_to_settings(
     input: &AddLlmProviderInput,
     cx: &mut App,
@@ -258,6 +283,7 @@ fn save_provider_to_settings(
 pub struct AddLlmProviderModal {
     provider: LlmCompatibleProvider,
     input: AddLlmProviderInput,
+    scroll_handle: ScrollHandle,
     focus_handle: FocusHandle,
     last_error: Option<SharedString>,
 }
@@ -278,6 +304,7 @@ impl AddLlmProviderModal {
             provider,
             last_error: None,
             focus_handle: cx.focus_handle(),
+            scroll_handle: ScrollHandle::new(),
         }
     }
 
@@ -418,6 +445,19 @@ impl AddLlmProviderModal {
             )
         })
     }
+
+    fn on_tab(&mut self, _: &menu::SelectNext, window: &mut Window, _: &mut Context<Self>) {
+        window.focus_next();
+    }
+
+    fn on_tab_prev(
+        &mut self,
+        _: &menu::SelectPrevious,
+        window: &mut Window,
+        _: &mut Context<Self>,
+    ) {
+        window.focus_prev();
+    }
 }
 
 impl EventEmitter<DismissEvent> for AddLlmProviderModal {}
@@ -431,15 +471,27 @@ impl Focusable for AddLlmProviderModal {
 impl ModalView for AddLlmProviderModal {}
 
 impl Render for AddLlmProviderModal {
-    fn render(&mut self, _window: &mut ui::Window, cx: &mut ui::Context<Self>) -> impl IntoElement {
+    fn render(&mut self, window: &mut ui::Window, cx: &mut ui::Context<Self>) -> impl IntoElement {
         let focus_handle = self.focus_handle(cx);
 
-        div()
+        let window_size = window.viewport_size();
+        let rem_size = window.rem_size();
+        let is_large_window = window_size.height / rem_size > rems_from_px(600.).0;
+
+        let modal_max_height = if is_large_window {
+            rems_from_px(450.)
+        } else {
+            rems_from_px(200.)
+        };
+
+        v_flex()
             .id("add-llm-provider-modal")
             .key_context("AddLlmProviderModal")
             .w(rems(34.))
             .elevation_3(cx)
             .on_action(cx.listener(Self::cancel))
+            .on_action(cx.listener(Self::on_tab))
+            .on_action(cx.listener(Self::on_tab_prev))
             .capture_any_mouse_down(cx.listener(|this, _, window, cx| {
                 this.focus_handle(cx).focus(window);
             }))
@@ -462,17 +514,25 @@ impl Render for AddLlmProviderModal {
                         )
                     })
                     .child(
-                        v_flex()
-                            .id("modal_content")
+                        div()
                             .size_full()
-                            .max_h_128()
-                            .overflow_y_scroll()
-                            .px(DynamicSpacing::Base12.rems(cx))
-                            .gap(DynamicSpacing::Base04.rems(cx))
-                            .child(self.input.provider_name.clone())
-                            .child(self.input.api_url.clone())
-                            .child(self.input.api_key.clone())
-                            .child(self.render_model_section(cx)),
+                            .vertical_scrollbar_for(self.scroll_handle.clone(), window, cx)
+                            .child(
+                                v_flex()
+                                    .id("modal_content")
+                                    .size_full()
+                                    .tab_group()
+                                    .max_h(modal_max_height)
+                                    .pl_3()
+                                    .pr_4()
+                                    .gap_2()
+                                    .overflow_y_scroll()
+                                    .track_scroll(&self.scroll_handle)
+                                    .child(self.input.provider_name.clone())
+                                    .child(self.input.api_url.clone())
+                                    .child(self.input.api_key.clone())
+                                    .child(self.render_model_section(cx)),
+                            ),
                     )
                     .footer(
                         ModalFooter::new().end_slot(
@@ -642,7 +702,7 @@ mod tests {
         let cx = setup_test(cx).await;
 
         cx.update(|window, cx| {
-            let model_input = ModelInput::new(window, cx);
+            let model_input = ModelInput::new(0, window, cx);
             model_input.name.update(cx, |input, cx| {
                 input.editor().update(cx, |editor, cx| {
                     editor.set_text("somemodel", window, cx);
@@ -678,7 +738,7 @@ mod tests {
         let cx = setup_test(cx).await;
 
         cx.update(|window, cx| {
-            let mut model_input = ModelInput::new(window, cx);
+            let mut model_input = ModelInput::new(0, window, cx);
             model_input.name.update(cx, |input, cx| {
                 input.editor().update(cx, |editor, cx| {
                     editor.set_text("somemodel", window, cx);
@@ -703,7 +763,7 @@ mod tests {
         let cx = setup_test(cx).await;
 
        cx.update(|window, cx| {
-            let mut model_input = ModelInput::new(window, cx);
+            let mut model_input = ModelInput::new(0, window, cx);
             model_input.name.update(cx, |input, cx| {
                 input.editor().update(cx, |editor, cx| {
                     editor.set_text("somemodel", window, cx);
@@ -767,7 +827,7 @@ mod tests {
             models.iter().enumerate()
         {
             if i >= input.models.len() {
-                input.models.push(ModelInput::new(window, cx));
+                input.models.push(ModelInput::new(i, window, cx));
             }
             let model = &mut input.models[i];
             set_text(&model.name, name, window, cx);
diff --git a/crates/ui/src/components/modal.rs b/crates/ui/src/components/modal.rs
index a70f5e1ea5a53a043086f3e102878f3614990d6e..85565f54885a06ddf2bc84f3639ca00fc4acc50e 100644
--- a/crates/ui/src/components/modal.rs
+++ b/crates/ui/src/components/modal.rs
@@ -77,6 +77,7 @@ impl RenderOnce for Modal {
             .w_full()
             .flex_1()
             .gap(DynamicSpacing::Base08.rems(cx))
+            .when(self.footer.is_some(), |this| this.pb_4())
             .when_some(
                 self.container_scroll_handler,
                 |this, container_scroll_handle| {
@@ -276,7 +277,6 @@ impl RenderOnce for ModalFooter {
     fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
         h_flex()
             .w_full()
-            .mt_4()
             .p(DynamicSpacing::Base08.rems(cx))
             .flex_none()
             .justify_between()