use std::sync::Arc;

use anyhow::Result;
use collections::HashSet;
use fs::Fs;
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, Task};
use language_model::LanguageModelRegistry;
use language_models::{
    AllLanguageModelSettings, OpenAiCompatibleSettingsContent,
    provider::open_ai_compatible::{AvailableModel, ModelCapabilities},
};
use settings::update_settings_file;
use ui::{
    Banner, Checkbox, KeyBinding, Modal, ModalFooter, ModalHeader, Section, ToggleState, prelude::*,
};
use ui_input::SingleLineInput;
use workspace::{ModalView, Workspace};

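/// A built-in preset whose name and API URL prefill the "Add LLM Provider" form.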
#[derive(Clone, Copy)]
pub enum LlmCompatibleProvider {
    OpenAi,
}

impl LlmCompatibleProvider {
    fn name(&self) -> &'static str {
        match self {
            LlmCompatibleProvider::OpenAi => "OpenAI",
        }
    }

    fn api_url(&self) -> &'static str {
        match self {
            LlmCompatibleProvider::OpenAi => "https://api.openai.com/v1",
        }
    }
}

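/// Form state for the modal: one input per provider-level field plus one `ModelInput` per model.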
struct AddLlmProviderInput {
    provider_name: Entity<SingleLineInput>,
    api_url: Entity<SingleLineInput>,
    api_key: Entity<SingleLineInput>,
    models: Vec<ModelInput>,
}

impl AddLlmProviderInput {
    fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut App) -> Self {
        let provider_name = single_line_input("Provider Name", provider.name(), None, window, cx);
        let api_url = single_line_input("API URL", provider.api_url(), None, window, cx);
        let api_key = single_line_input(
            "API Key",
            "000000000000000000000000000000000000000000000000",
            None,
            window,
            cx,
        );

        Self {
            provider_name,
            api_url,
            api_key,
            models: vec![ModelInput::new(window, cx)],
        }
    }

    fn add_model(&mut self, window: &mut Window, cx: &mut App) {
        self.models.push(ModelInput::new(window, cx));
    }

    fn remove_model(&mut self, index: usize) {
        self.models.remove(index);
    }
}

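/// Checkbox state mirroring `ModelCapabilities`, stored as `ToggleState` to back the UI.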
struct ModelCapabilityToggles {
    pub supports_tools: ToggleState,
    pub supports_images: ToggleState,
    pub supports_parallel_tool_calls: ToggleState,
    pub supports_prompt_cache_key: ToggleState,
}

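/// Inputs for a single model entry; `parse` converts them into an `AvailableModel` or returns a
/// human-readable validation error.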
struct ModelInput {
    name: Entity<SingleLineInput>,
    max_completion_tokens: Entity<SingleLineInput>,
    max_output_tokens: Entity<SingleLineInput>,
    max_tokens: Entity<SingleLineInput>,
    capabilities: ModelCapabilityToggles,
}

impl ModelInput {
    fn new(window: &mut Window, cx: &mut App) -> Self {
        let model_name = single_line_input(
            "Model Name",
            "e.g. gpt-4o, claude-opus-4, gemini-2.5-pro",
            None,
            window,
            cx,
        );
        let max_completion_tokens = single_line_input(
            "Max Completion Tokens",
            "200000",
            Some("200000"),
            window,
            cx,
        );
        let max_output_tokens = single_line_input(
            "Max Output Tokens",
            "Max Output Tokens",
            Some("32000"),
            window,
            cx,
        );
        let max_tokens = single_line_input("Max Tokens", "Max Tokens", Some("200000"), window, cx);
        let ModelCapabilities {
            tools,
            images,
            parallel_tool_calls,
            prompt_cache_key,
        } = ModelCapabilities::default();
        Self {
            name: model_name,
            max_completion_tokens,
            max_output_tokens,
            max_tokens,
            capabilities: ModelCapabilityToggles {
                supports_tools: tools.into(),
                supports_images: images.into(),
                supports_parallel_tool_calls: parallel_tool_calls.into(),
                supports_prompt_cache_key: prompt_cache_key.into(),
            },
        }
    }

    fn parse(&self, cx: &App) -> Result<AvailableModel, SharedString> {
        let name = self.name.read(cx).text(cx);
        if name.is_empty() {
            return Err(SharedString::from("Model Name cannot be empty"));
        }
        Ok(AvailableModel {
            name,
            display_name: None,
            max_completion_tokens: Some(
                self.max_completion_tokens
                    .read(cx)
                    .text(cx)
                    .parse::<u64>()
                    .map_err(|_| SharedString::from("Max Completion Tokens must be a number"))?,
            ),
            max_output_tokens: Some(
                self.max_output_tokens
                    .read(cx)
                    .text(cx)
                    .parse::<u64>()
                    .map_err(|_| SharedString::from("Max Output Tokens must be a number"))?,
            ),
            max_tokens: self
                .max_tokens
                .read(cx)
                .text(cx)
                .parse::<u64>()
                .map_err(|_| SharedString::from("Max Tokens must be a number"))?,
            capabilities: ModelCapabilities {
                tools: self.capabilities.supports_tools.selected(),
                images: self.capabilities.supports_images.selected(),
                parallel_tool_calls: self.capabilities.supports_parallel_tool_calls.selected(),
                prompt_cache_key: self.capabilities.supports_prompt_cache_key.selected(),
            },
        })
    }
}

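/// Builds a labeled single-line input, optionally pre-filled with `text`.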
fn single_line_input(
    label: impl Into<SharedString>,
    placeholder: impl Into<SharedString>,
    text: Option<&str>,
    window: &mut Window,
    cx: &mut App,
) -> Entity<SingleLineInput> {
    cx.new(|cx| {
        let input = SingleLineInput::new(window, cx, placeholder).label(label);
        if let Some(text) = text {
            input
                .editor()
                .update(cx, |editor, cx| editor.set_text(text, window, cx));
        }
        input
    })
}

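/// Validates the form, stores the API key in the keychain, and writes the provider and its
/// models to the `openai_compatible` settings. Resolves to a human-readable error on failure.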
fn save_provider_to_settings(
    input: &AddLlmProviderInput,
    cx: &mut App,
) -> Task<Result<(), SharedString>> {
    let provider_name: Arc<str> = input.provider_name.read(cx).text(cx).into();
    if provider_name.is_empty() {
        return Task::ready(Err("Provider Name cannot be empty".into()));
    }

    if LanguageModelRegistry::read_global(cx)
        .providers()
        .iter()
        .any(|provider| {
            provider.id().0.as_ref() == provider_name.as_ref()
                || provider.name().0.as_ref() == provider_name.as_ref()
        })
    {
        return Task::ready(Err(
            "Provider Name is already taken by another provider".into()
        ));
    }

    let api_url = input.api_url.read(cx).text(cx);
    if api_url.is_empty() {
        return Task::ready(Err("API URL cannot be empty".into()));
    }

    let api_key = input.api_key.read(cx).text(cx);
    if api_key.is_empty() {
        return Task::ready(Err("API Key cannot be empty".into()));
    }

    let mut models = Vec::new();
    let mut model_names: HashSet<String> = HashSet::default();
    for model in &input.models {
        match model.parse(cx) {
            Ok(model) => {
                if !model_names.insert(model.name.clone()) {
                    return Task::ready(Err("Model Names must be unique".into()));
                }
                models.push(model)
            }
            Err(err) => return Task::ready(Err(err)),
        }
    }

    let fs = <dyn Fs>::global(cx);
    let task = cx.write_credentials(&api_url, "Bearer", api_key.as_bytes());
    cx.spawn(async move |cx| {
        task.await
            .map_err(|_| "Failed to write API key to keychain")?;
        cx.update(|cx| {
            update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
                settings.openai_compatible.get_or_insert_default().insert(
                    provider_name,
                    OpenAiCompatibleSettingsContent {
                        api_url,
                        available_models: models,
                    },
                );
            });
        })
        .ok();
        Ok(())
    })
}

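/// Modal that collects the details of an OpenAI-compatible provider and saves them to settings.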
pub struct AddLlmProviderModal {
    provider: LlmCompatibleProvider,
    input: AddLlmProviderInput,
    focus_handle: FocusHandle,
    last_error: Option<SharedString>,
}

impl AddLlmProviderModal {
    pub fn toggle(
        provider: LlmCompatibleProvider,
        workspace: &mut Workspace,
        window: &mut Window,
        cx: &mut Context<Workspace>,
    ) {
        workspace.toggle_modal(window, cx, |window, cx| Self::new(provider, window, cx));
    }

    fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut Context<Self>) -> Self {
        Self {
            input: AddLlmProviderInput::new(provider, window, cx),
            provider,
            last_error: None,
            focus_handle: cx.focus_handle(),
        }
    }

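    /// Saves the provider, dismissing the modal on success or showing the error in the banner.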
    fn confirm(&mut self, _: &menu::Confirm, _: &mut Window, cx: &mut Context<Self>) {
        let task = save_provider_to_settings(&self.input, cx);
        cx.spawn(async move |this, cx| {
            let result = task.await;
            this.update(cx, |this, cx| match result {
                Ok(_) => {
                    cx.emit(DismissEvent);
                }
                Err(error) => {
                    this.last_error = Some(error);
                    cx.notify();
                }
            })
        })
        .detach_and_log_err(cx);
    }

    fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
        cx.emit(DismissEvent);
    }

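    /// Renders the "Models" header with its "Add Model" button, followed by a card per model.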
    fn render_model_section(&self, cx: &mut Context<Self>) -> impl IntoElement {
        v_flex()
            .mt_1()
            .gap_2()
            .child(
                h_flex()
                    .justify_between()
                    .child(Label::new("Models").size(LabelSize::Small))
                    .child(
                        Button::new("add-model", "Add Model")
                            .icon(IconName::Plus)
                            .icon_position(IconPosition::Start)
                            .icon_size(IconSize::XSmall)
                            .icon_color(Color::Muted)
                            .label_size(LabelSize::Small)
                            .on_click(cx.listener(|this, _, window, cx| {
                                this.input.add_model(window, cx);
                                cx.notify();
                            })),
                    ),
            )
            .children(
                self.input
                    .models
                    .iter()
                    .enumerate()
                    .map(|(ix, _)| self.render_model(ix, cx)),
            )
    }

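    /// Renders the editable card for the model at `ix`: name, token limits, capability
    /// checkboxes, and a "Remove Model" button when more than one model is present.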
    fn render_model(&self, ix: usize, cx: &mut Context<Self>) -> impl IntoElement + use<> {
        let has_more_than_one_model = self.input.models.len() > 1;
        let model = &self.input.models[ix];

        v_flex()
            .p_2()
            .gap_2()
            .rounded_sm()
            .border_1()
            .border_dashed()
            .border_color(cx.theme().colors().border.opacity(0.6))
            .bg(cx.theme().colors().element_active.opacity(0.15))
            .child(model.name.clone())
            .child(
                h_flex()
                    .gap_2()
                    .child(model.max_completion_tokens.clone())
                    .child(model.max_output_tokens.clone()),
            )
            .child(model.max_tokens.clone())
            .child(
                v_flex()
                    .gap_1()
                    .child(
                        Checkbox::new(("supports-tools", ix), model.capabilities.supports_tools)
                            .label("Supports tools")
                            .on_click(cx.listener(move |this, checked, _window, cx| {
                                this.input.models[ix].capabilities.supports_tools = *checked;
                                cx.notify();
                            })),
                    )
                    .child(
                        Checkbox::new(("supports-images", ix), model.capabilities.supports_images)
                            .label("Supports images")
                            .on_click(cx.listener(move |this, checked, _window, cx| {
                                this.input.models[ix].capabilities.supports_images = *checked;
                                cx.notify();
                            })),
                    )
                    .child(
                        Checkbox::new(
                            ("supports-parallel-tool-calls", ix),
                            model.capabilities.supports_parallel_tool_calls,
                        )
                        .label("Supports parallel_tool_calls")
                        .on_click(cx.listener(
                            move |this, checked, _window, cx| {
                                this.input.models[ix]
                                    .capabilities
                                    .supports_parallel_tool_calls = *checked;
                                cx.notify();
                            },
                        )),
                    )
                    .child(
                        Checkbox::new(
                            ("supports-prompt-cache-key", ix),
                            model.capabilities.supports_prompt_cache_key,
                        )
                        .label("Supports prompt_cache_key")
                        .on_click(cx.listener(
                            move |this, checked, _window, cx| {
                                this.input.models[ix].capabilities.supports_prompt_cache_key =
                                    *checked;
                                cx.notify();
                            },
                        )),
                    ),
            )
            .when(has_more_than_one_model, |this| {
                this.child(
                    Button::new(("remove-model", ix), "Remove Model")
                        .icon(IconName::Trash)
                        .icon_position(IconPosition::Start)
                        .icon_size(IconSize::XSmall)
                        .icon_color(Color::Muted)
                        .label_size(LabelSize::Small)
                        .style(ButtonStyle::Outlined)
                        .full_width()
                        .on_click(cx.listener(move |this, _, _window, cx| {
                            this.input.remove_model(ix);
                            cx.notify();
                        })),
                )
            })
    }
}

impl EventEmitter<DismissEvent> for AddLlmProviderModal {}

impl Focusable for AddLlmProviderModal {
    fn focus_handle(&self, _cx: &App) -> FocusHandle {
        self.focus_handle.clone()
    }
}

impl ModalView for AddLlmProviderModal {}

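/// Renders the modal: header, optional error banner, scrollable form inputs, and footer actions.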
impl Render for AddLlmProviderModal {
    fn render(&mut self, window: &mut ui::Window, cx: &mut ui::Context<Self>) -> impl IntoElement {
        let focus_handle = self.focus_handle(cx);

        div()
            .id("add-llm-provider-modal")
            .key_context("AddLlmProviderModal")
            .w(rems(34.))
            .elevation_3(cx)
            .on_action(cx.listener(Self::cancel))
            .capture_any_mouse_down(cx.listener(|this, _, window, cx| {
                this.focus_handle(cx).focus(window);
            }))
            .child(
                Modal::new("configure-context-server", None)
                    .header(ModalHeader::new().headline("Add LLM Provider").description(
                        match self.provider {
                            LlmCompatibleProvider::OpenAi => {
                                "This provider will use an OpenAI compatible API."
                            }
                        },
                    ))
                    .when_some(self.last_error.clone(), |this, error| {
                        this.section(
                            Section::new().child(
                                Banner::new()
                                    .severity(Severity::Warning)
                                    .child(div().text_xs().child(error)),
                            ),
                        )
                    })
                    .child(
                        v_flex()
                            .id("modal_content")
                            .size_full()
                            .max_h_128()
                            .overflow_y_scroll()
                            .px(DynamicSpacing::Base12.rems(cx))
                            .gap(DynamicSpacing::Base04.rems(cx))
                            .child(self.input.provider_name.clone())
                            .child(self.input.api_url.clone())
                            .child(self.input.api_key.clone())
                            .child(self.render_model_section(cx)),
                    )
                    .footer(
                        ModalFooter::new().end_slot(
                            h_flex()
                                .gap_1()
                                .child(
                                    Button::new("cancel", "Cancel")
                                        .key_binding(
                                            KeyBinding::for_action_in(
                                                &menu::Cancel,
                                                &focus_handle,
                                                window,
                                                cx,
                                            )
                                            .map(|kb| kb.size(rems_from_px(12.))),
                                        )
                                        .on_click(cx.listener(|this, _event, window, cx| {
                                            this.cancel(&menu::Cancel, window, cx)
                                        })),
                                )
                                .child(
                                    Button::new("save-server", "Save Provider")
                                        .key_binding(
                                            KeyBinding::for_action_in(
                                                &menu::Confirm,
                                                &focus_handle,
                                                window,
                                                cx,
                                            )
                                            .map(|kb| kb.size(rems_from_px(12.))),
                                        )
                                        .on_click(cx.listener(|this, _event, window, cx| {
                                            this.confirm(&menu::Confirm, window, cx)
                                        })),
                                ),
                        ),
                    ),
            )
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use editor::EditorSettings;
    use fs::FakeFs;
    use gpui::{TestAppContext, VisualTestContext};
    use language::language_settings;
    use language_model::{
        LanguageModelProviderId, LanguageModelProviderName,
        fake_provider::FakeLanguageModelProvider,
    };
    use project::Project;
    use settings::{Settings as _, SettingsStore};
    use util::path;

    #[gpui::test]
    async fn test_save_provider_invalid_inputs(cx: &mut TestAppContext) {
        let cx = setup_test(cx).await;

        assert_eq!(
            save_provider_validation_errors("", "someurl", "somekey", vec![], cx).await,
            Some("Provider Name cannot be empty".into())
        );

        assert_eq!(
            save_provider_validation_errors("someprovider", "", "somekey", vec![], cx).await,
            Some("API URL cannot be empty".into())
        );

        assert_eq!(
            save_provider_validation_errors("someprovider", "someurl", "", vec![], cx).await,
            Some("API Key cannot be empty".into())
        );

        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "somekey",
                vec![("", "200000", "200000", "32000")],
                cx,
            )
            .await,
            Some("Model Name cannot be empty".into())
        );

        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "somekey",
                vec![("somemodel", "abc", "200000", "32000")],
                cx,
            )
            .await,
            Some("Max Tokens must be a number".into())
        );

        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "somekey",
                vec![("somemodel", "200000", "abc", "32000")],
                cx,
            )
            .await,
            Some("Max Completion Tokens must be a number".into())
        );

        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "somekey",
                vec![("somemodel", "200000", "200000", "abc")],
                cx,
            )
            .await,
            Some("Max Output Tokens must be a number".into())
        );

        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "somekey",
                vec![
                    ("somemodel", "200000", "200000", "32000"),
                    ("somemodel", "200000", "200000", "32000"),
                ],
                cx,
            )
            .await,
            Some("Model Names must be unique".into())
        );
    }

    #[gpui::test]
    async fn test_save_provider_name_conflict(cx: &mut TestAppContext) {
        let cx = setup_test(cx).await;

        cx.update(|_window, cx| {
            LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
                registry.register_provider(
                    FakeLanguageModelProvider::new(
                        LanguageModelProviderId::new("someprovider"),
                        LanguageModelProviderName::new("Some Provider"),
                    ),
                    cx,
                );
            });
        });

        assert_eq!(
            save_provider_validation_errors(
                "someprovider",
                "someurl",
                "someapikey",
                vec![("somemodel", "200000", "200000", "32000")],
                cx,
            )
            .await,
            Some("Provider Name is already taken by another provider".into())
        );
    }

    #[gpui::test]
    async fn test_model_input_default_capabilities(cx: &mut TestAppContext) {
        let cx = setup_test(cx).await;

        cx.update(|window, cx| {
            let model_input = ModelInput::new(window, cx);
            model_input.name.update(cx, |input, cx| {
                input.editor().update(cx, |editor, cx| {
                    editor.set_text("somemodel", window, cx);
                });
            });
            assert_eq!(
                model_input.capabilities.supports_tools,
                ToggleState::Selected
            );
            assert_eq!(
                model_input.capabilities.supports_images,
                ToggleState::Unselected
            );
            assert_eq!(
                model_input.capabilities.supports_parallel_tool_calls,
                ToggleState::Unselected
            );
            assert_eq!(
                model_input.capabilities.supports_prompt_cache_key,
                ToggleState::Unselected
            );

            let parsed_model = model_input.parse(cx).unwrap();
            assert!(parsed_model.capabilities.tools);
            assert!(!parsed_model.capabilities.images);
            assert!(!parsed_model.capabilities.parallel_tool_calls);
            assert!(!parsed_model.capabilities.prompt_cache_key);
        });
    }

    #[gpui::test]
    async fn test_model_input_deselected_capabilities(cx: &mut TestAppContext) {
        let cx = setup_test(cx).await;

        cx.update(|window, cx| {
            let mut model_input = ModelInput::new(window, cx);
            model_input.name.update(cx, |input, cx| {
                input.editor().update(cx, |editor, cx| {
                    editor.set_text("somemodel", window, cx);
                });
            });

            model_input.capabilities.supports_tools = ToggleState::Unselected;
            model_input.capabilities.supports_images = ToggleState::Unselected;
            model_input.capabilities.supports_parallel_tool_calls = ToggleState::Unselected;
            model_input.capabilities.supports_prompt_cache_key = ToggleState::Unselected;

            let parsed_model = model_input.parse(cx).unwrap();
            assert!(!parsed_model.capabilities.tools);
            assert!(!parsed_model.capabilities.images);
            assert!(!parsed_model.capabilities.parallel_tool_calls);
            assert!(!parsed_model.capabilities.prompt_cache_key);
        });
    }

    #[gpui::test]
    async fn test_model_input_with_name_and_capabilities(cx: &mut TestAppContext) {
        let cx = setup_test(cx).await;

        cx.update(|window, cx| {
            let mut model_input = ModelInput::new(window, cx);
            model_input.name.update(cx, |input, cx| {
                input.editor().update(cx, |editor, cx| {
                    editor.set_text("somemodel", window, cx);
                });
            });

            model_input.capabilities.supports_tools = ToggleState::Selected;
            model_input.capabilities.supports_images = ToggleState::Unselected;
            model_input.capabilities.supports_parallel_tool_calls = ToggleState::Selected;
            model_input.capabilities.supports_prompt_cache_key = ToggleState::Unselected;

            let parsed_model = model_input.parse(cx).unwrap();
            assert_eq!(parsed_model.name, "somemodel");
            assert!(parsed_model.capabilities.tools);
            assert!(!parsed_model.capabilities.images);
            assert!(parsed_model.capabilities.parallel_tool_calls);
            assert!(!parsed_model.capabilities.prompt_cache_key);
        });
    }

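    /// Initializes settings, theme, and language model globals, then builds a test workspace so
    /// the modal's inputs can be constructed.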
    async fn setup_test(cx: &mut TestAppContext) -> &mut VisualTestContext {
        cx.update(|cx| {
            let store = SettingsStore::test(cx);
            cx.set_global(store);
            workspace::init_settings(cx);
            Project::init_settings(cx);
            theme::init(theme::LoadThemes::JustBase, cx);
            language_settings::init(cx);
            EditorSettings::register(cx);
            language_model::init_settings(cx);
            language_models::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
        let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
        let (_, cx) =
            cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));

        cx
    }

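    /// Fills an `AddLlmProviderInput` from the given strings, runs `save_provider_to_settings`,
    /// and returns the resulting error, if any. Each model tuple is
    /// `(name, max_tokens, max_completion_tokens, max_output_tokens)`.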
    async fn save_provider_validation_errors(
        provider_name: &str,
        api_url: &str,
        api_key: &str,
        models: Vec<(&str, &str, &str, &str)>,
        cx: &mut VisualTestContext,
    ) -> Option<SharedString> {
        fn set_text(
            input: &Entity<SingleLineInput>,
            text: &str,
            window: &mut Window,
            cx: &mut App,
        ) {
            input.update(cx, |input, cx| {
                input.editor().update(cx, |editor, cx| {
                    editor.set_text(text, window, cx);
                });
            });
        }

        let task = cx.update(|window, cx| {
            let mut input = AddLlmProviderInput::new(LlmCompatibleProvider::OpenAi, window, cx);
            set_text(&input.provider_name, provider_name, window, cx);
            set_text(&input.api_url, api_url, window, cx);
            set_text(&input.api_key, api_key, window, cx);

            for (i, (name, max_tokens, max_completion_tokens, max_output_tokens)) in
                models.iter().enumerate()
            {
                if i >= input.models.len() {
                    input.models.push(ModelInput::new(window, cx));
                }
                let model = &mut input.models[i];
                set_text(&model.name, name, window, cx);
                set_text(&model.max_tokens, max_tokens, window, cx);
                set_text(
                    &model.max_completion_tokens,
                    max_completion_tokens,
                    window,
                    cx,
                );
                set_text(&model.max_output_tokens, max_output_tokens, window, cx);
            }
            save_provider_to_settings(&input, cx)
        });

        task.await.err()
    }
}