lmstudio.rs

   1use anyhow::{Result, anyhow};
   2use collections::HashMap;
   3use credentials_provider::CredentialsProvider;
   4use fs::Fs;
   5use futures::Stream;
   6use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
   7use gpui::{AnyView, App, AsyncApp, Context, CursorStyle, Entity, Subscription, Task};
   8use http_client::HttpClient;
   9use language_model::{
  10    ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError,
  11    LanguageModelCompletionEvent, LanguageModelToolChoice, LanguageModelToolResultContent,
  12    LanguageModelToolUse, MessageContent, StopReason, TokenUsage, env_var,
  13};
  14use language_model::{
  15    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
  16    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
  17};
  18use lmstudio::{LMSTUDIO_API_URL, ModelType, get_models};
  19
  20pub use settings::LmStudioAvailableModel as AvailableModel;
  21use settings::{Settings, SettingsStore, update_settings_file};
  22use std::pin::Pin;
  23use std::sync::LazyLock;
  24use std::{collections::BTreeMap, sync::Arc};
  25use ui::{
  26    ButtonLike, ConfiguredApiCard, ElevationIndex, List, ListBulletItem, Tooltip, prelude::*,
  27};
  28use ui_input::InputField;
  29
  30use crate::AllLanguageModelSettings;
  31use language_model::util::parse_tool_arguments;
  32
// Where users can download the LM Studio app.
const LMSTUDIO_DOWNLOAD_URL: &str = "https://lmstudio.ai/download";
// Browsable catalog of models that can be installed in LM Studio.
const LMSTUDIO_CATALOG_URL: &str = "https://lmstudio.ai/models";
// LM Studio home page.
const LMSTUDIO_SITE: &str = "https://lmstudio.ai/";

// Stable identifier and display name for this provider.
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("lmstudio");
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("LM Studio");

// Environment variable that can supply the LM Studio API key.
const API_KEY_ENV_VAR_NAME: &str = "LMSTUDIO_API_KEY";
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
  42
/// Settings for the LM Studio provider, resolved from the user's settings.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct LmStudioSettings {
    // Base URL of the LM Studio server's API.
    pub api_url: String,
    // Models declared explicitly in settings; these take precedence over
    // models reported by the server (see `provided_models`).
    pub available_models: Vec<AvailableModel>,
}
  48
/// Language-model provider backed by a running LM Studio server.
pub struct LmStudioLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    // Shared, observable provider state (API key, fetched model list).
    state: Entity<State>,
}
  53
/// Observable state shared between the provider, its models, and the
/// configuration view.
pub struct State {
    // API key storage, scoped to the configured API URL.
    api_key_state: ApiKeyState,
    credentials_provider: Arc<dyn CredentialsProvider>,
    http_client: Arc<dyn HttpClient>,
    // Models most recently reported by the LM Studio server.
    available_models: Vec<lmstudio::Model>,
    // Handle to the in-flight model fetch, if any; replaced on restart.
    fetch_model_task: Option<Task<Result<()>>>,
    // Keeps the settings observer alive for the lifetime of this state.
    _subscription: Subscription,
}
  62
  63impl State {
  64    fn is_authenticated(&self) -> bool {
  65        !self.available_models.is_empty()
  66    }
  67
  68    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
  69        let credentials_provider = self.credentials_provider.clone();
  70        let api_url = LmStudioLanguageModelProvider::api_url(cx).into();
  71        let task = self.api_key_state.store(
  72            api_url,
  73            api_key,
  74            |this| &mut this.api_key_state,
  75            credentials_provider,
  76            cx,
  77        );
  78        self.restart_fetch_models_task(cx);
  79        task
  80    }
  81
  82    fn fetch_models(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
  83        let settings = &AllLanguageModelSettings::get_global(cx).lmstudio;
  84        let http_client = self.http_client.clone();
  85        let api_url = settings.api_url.clone();
  86        let api_key = self.api_key_state.key(&api_url);
  87
  88        // As a proxy for the server being "authenticated", we'll check if its up by fetching the models
  89        cx.spawn(async move |this, cx| {
  90            let models =
  91                get_models(http_client.as_ref(), &api_url, api_key.as_deref(), None).await?;
  92
  93            let mut models: Vec<lmstudio::Model> = models
  94                .into_iter()
  95                .filter(|model| model.r#type != ModelType::Embeddings)
  96                .map(|model| {
  97                    lmstudio::Model::new(
  98                        &model.id,
  99                        None,
 100                        model
 101                            .loaded_context_length
 102                            .or_else(|| model.max_context_length),
 103                        model.capabilities.supports_tool_calls(),
 104                        model.capabilities.supports_images() || model.r#type == ModelType::Vlm,
 105                    )
 106                })
 107                .collect();
 108
 109            models.sort_by(|a, b| a.name.cmp(&b.name));
 110
 111            this.update(cx, |this, cx| {
 112                this.available_models = models;
 113                cx.notify();
 114            })
 115        })
 116    }
 117
 118    fn restart_fetch_models_task(&mut self, cx: &mut Context<Self>) {
 119        let task = self.fetch_models(cx);
 120        self.fetch_model_task.replace(task);
 121    }
 122
 123    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
 124        let credentials_provider = self.credentials_provider.clone();
 125        let api_url = LmStudioLanguageModelProvider::api_url(cx).into();
 126        let _task = self.api_key_state.load_if_needed(
 127            api_url,
 128            |this| &mut this.api_key_state,
 129            credentials_provider,
 130            cx,
 131        );
 132
 133        if self.is_authenticated() {
 134            return Task::ready(Ok(()));
 135        }
 136
 137        let fetch_models_task = self.fetch_models(cx);
 138        cx.spawn(async move |_this, _cx| {
 139            match fetch_models_task.await {
 140                Ok(()) => Ok(()),
 141                Err(err) => {
 142                    // If any cause in the error chain is an std::io::Error with
 143                    // ErrorKind::ConnectionRefused, treat this as "credentials not found"
 144                    // (i.e. LM Studio not running).
 145                    let mut connection_refused = false;
 146                    for cause in err.chain() {
 147                        if let Some(io_err) = cause.downcast_ref::<std::io::Error>() {
 148                            if io_err.kind() == std::io::ErrorKind::ConnectionRefused {
 149                                connection_refused = true;
 150                                break;
 151                            }
 152                        }
 153                    }
 154                    if connection_refused {
 155                        Err(AuthenticateError::ConnectionRefused)
 156                    } else {
 157                        Err(AuthenticateError::Other(err))
 158                    }
 159                }
 160            }
 161        })
 162    }
 163}
 164
impl LmStudioLanguageModelProvider {
    /// Creates the provider, wiring up a settings observer that reacts to
    /// LM Studio settings changes, and kicks off an initial model fetch.
    pub fn new(
        http_client: Arc<dyn HttpClient>,
        credentials_provider: Arc<dyn CredentialsProvider>,
        cx: &mut App,
    ) -> Self {
        let this = Self {
            http_client: http_client.clone(),
            state: cx.new(|cx| {
                let subscription = cx.observe_global::<SettingsStore>({
                    // Snapshot the current settings so the observer only
                    // reacts when they actually change.
                    let mut settings = AllLanguageModelSettings::get_global(cx).lmstudio.clone();
                    move |this: &mut State, cx| {
                        let new_settings =
                            AllLanguageModelSettings::get_global(cx).lmstudio.clone();
                        if settings != new_settings {
                            let credentials_provider = this.credentials_provider.clone();
                            let api_url = Self::api_url(cx).into();
                            // API keys are stored per URL, so a URL change may
                            // require loading a different key.
                            this.api_key_state.handle_url_change(
                                api_url,
                                |this| &mut this.api_key_state,
                                credentials_provider,
                                cx,
                            );
                            settings = new_settings;
                            this.restart_fetch_models_task(cx);
                            cx.notify();
                        }
                    }
                });

                State {
                    api_key_state: ApiKeyState::new(
                        Self::api_url(cx).into(),
                        (*API_KEY_ENV_VAR).clone(),
                    ),
                    credentials_provider,
                    http_client,
                    available_models: Default::default(),
                    fetch_model_task: None,
                    _subscription: subscription,
                }
            }),
        };
        // Populate the model list immediately rather than waiting for the
        // first settings change or authenticate call.
        this.state
            .update(cx, |state, cx| state.restart_fetch_models_task(cx));
        this
    }

    /// The LM Studio API URL from the current settings.
    fn api_url(cx: &App) -> String {
        AllLanguageModelSettings::get_global(cx)
            .lmstudio
            .api_url
            .clone()
    }

    /// Whether the user has overridden the default LM Studio API URL.
    fn has_custom_url(cx: &App) -> bool {
        Self::api_url(cx) != LMSTUDIO_API_URL
    }
}
 224
 225impl LanguageModelProviderState for LmStudioLanguageModelProvider {
 226    type ObservableEntity = State;
 227
 228    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
 229        Some(self.state.clone())
 230    }
 231}
 232
 233impl LanguageModelProvider for LmStudioLanguageModelProvider {
 234    fn id(&self) -> LanguageModelProviderId {
 235        PROVIDER_ID
 236    }
 237
 238    fn name(&self) -> LanguageModelProviderName {
 239        PROVIDER_NAME
 240    }
 241
 242    fn icon(&self) -> IconOrSvg {
 243        IconOrSvg::Icon(IconName::AiLmStudio)
 244    }
 245
 246    fn default_model(&self, _: &App) -> Option<Arc<dyn LanguageModel>> {
 247        // We shouldn't try to select default model, because it might lead to a load call for an unloaded model.
 248        // In a constrained environment where user might not have enough resources it'll be a bad UX to select something
 249        // to load by default.
 250        None
 251    }
 252
 253    fn default_fast_model(&self, _: &App) -> Option<Arc<dyn LanguageModel>> {
 254        // See explanation for default_model.
 255        None
 256    }
 257
 258    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
 259        let mut models: BTreeMap<String, lmstudio::Model> = BTreeMap::default();
 260
 261        // Add models from the LM Studio API
 262        for model in self.state.read(cx).available_models.iter() {
 263            models.insert(model.name.clone(), model.clone());
 264        }
 265
 266        // Override with available models from settings
 267        for model in AllLanguageModelSettings::get_global(cx)
 268            .lmstudio
 269            .available_models
 270            .iter()
 271        {
 272            models.insert(
 273                model.name.clone(),
 274                lmstudio::Model {
 275                    name: model.name.clone(),
 276                    display_name: model.display_name.clone(),
 277                    max_tokens: model.max_tokens,
 278                    supports_tool_calls: model.supports_tool_calls,
 279                    supports_images: model.supports_images,
 280                },
 281            );
 282        }
 283
 284        models
 285            .into_values()
 286            .map(|model| {
 287                Arc::new(LmStudioLanguageModel {
 288                    id: LanguageModelId::from(model.name.clone()),
 289                    model,
 290                    http_client: self.http_client.clone(),
 291                    request_limiter: RateLimiter::new(4),
 292                    state: self.state.clone(),
 293                }) as Arc<dyn LanguageModel>
 294            })
 295            .collect()
 296    }
 297
 298    fn is_authenticated(&self, cx: &App) -> bool {
 299        self.state.read(cx).is_authenticated()
 300    }
 301
 302    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
 303        self.state.update(cx, |state, cx| state.authenticate(cx))
 304    }
 305
 306    fn configuration_view(
 307        &self,
 308        _target_agent: language_model::ConfigurationViewTargetAgent,
 309        _window: &mut Window,
 310        cx: &mut App,
 311    ) -> AnyView {
 312        cx.new(|cx| ConfigurationView::new(self.state.clone(), _window, cx))
 313            .into()
 314    }
 315
 316    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
 317        self.state
 318            .update(cx, |state, cx| state.set_api_key(None, cx))
 319    }
 320}
 321
/// A single LM Studio model exposed through the `LanguageModel` interface.
pub struct LmStudioLanguageModel {
    id: LanguageModelId,
    model: lmstudio::Model,
    http_client: Arc<dyn HttpClient>,
    // Limits request concurrency/rate against the local server.
    request_limiter: RateLimiter,
    // Shared provider state, used to look up the API key at request time.
    state: Entity<State>,
}
 329
 330impl LmStudioLanguageModel {
 331    fn to_lmstudio_request(
 332        &self,
 333        request: LanguageModelRequest,
 334    ) -> lmstudio::ChatCompletionRequest {
 335        let mut messages = Vec::new();
 336
 337        for message in request.messages {
 338            for content in message.content {
 339                match content {
 340                    MessageContent::Text(text) => add_message_content_part(
 341                        lmstudio::MessagePart::Text { text },
 342                        message.role,
 343                        &mut messages,
 344                    ),
 345                    MessageContent::Thinking { .. } => {}
 346                    MessageContent::RedactedThinking(_) => {}
 347                    MessageContent::Image(image) => {
 348                        add_message_content_part(
 349                            lmstudio::MessagePart::Image {
 350                                image_url: lmstudio::ImageUrl {
 351                                    url: image.to_base64_url(),
 352                                    detail: None,
 353                                },
 354                            },
 355                            message.role,
 356                            &mut messages,
 357                        );
 358                    }
 359                    MessageContent::ToolUse(tool_use) => {
 360                        let tool_call = lmstudio::ToolCall {
 361                            id: tool_use.id.to_string(),
 362                            content: lmstudio::ToolCallContent::Function {
 363                                function: lmstudio::FunctionContent {
 364                                    name: tool_use.name.to_string(),
 365                                    arguments: serde_json::to_string(&tool_use.input)
 366                                        .unwrap_or_default(),
 367                                },
 368                            },
 369                        };
 370
 371                        if let Some(lmstudio::ChatMessage::Assistant { tool_calls, .. }) =
 372                            messages.last_mut()
 373                        {
 374                            tool_calls.push(tool_call);
 375                        } else {
 376                            messages.push(lmstudio::ChatMessage::Assistant {
 377                                content: None,
 378                                tool_calls: vec![tool_call],
 379                            });
 380                        }
 381                    }
 382                    MessageContent::ToolResult(tool_result) => {
 383                        let content = match &tool_result.content {
 384                            LanguageModelToolResultContent::Text(text) => {
 385                                vec![lmstudio::MessagePart::Text {
 386                                    text: text.to_string(),
 387                                }]
 388                            }
 389                            LanguageModelToolResultContent::Image(image) => {
 390                                vec![lmstudio::MessagePart::Image {
 391                                    image_url: lmstudio::ImageUrl {
 392                                        url: image.to_base64_url(),
 393                                        detail: None,
 394                                    },
 395                                }]
 396                            }
 397                        };
 398
 399                        messages.push(lmstudio::ChatMessage::Tool {
 400                            content: content.into(),
 401                            tool_call_id: tool_result.tool_use_id.to_string(),
 402                        });
 403                    }
 404                }
 405            }
 406        }
 407
 408        lmstudio::ChatCompletionRequest {
 409            model: self.model.name.clone(),
 410            messages,
 411            stream: true,
 412            max_tokens: Some(-1),
 413            stop: Some(request.stop),
 414            // In LM Studio you can configure specific settings you'd like to use for your model.
 415            // For example Qwen3 is recommended to be used with 0.7 temperature.
 416            // It would be a bad UX to silently override these settings from Zed, so we pass no temperature as a default.
 417            temperature: request.temperature.or(None),
 418            tools: request
 419                .tools
 420                .into_iter()
 421                .map(|tool| lmstudio::ToolDefinition::Function {
 422                    function: lmstudio::FunctionDefinition {
 423                        name: tool.name,
 424                        description: Some(tool.description),
 425                        parameters: Some(tool.input_schema),
 426                    },
 427                })
 428                .collect(),
 429            tool_choice: request.tool_choice.map(|choice| match choice {
 430                LanguageModelToolChoice::Auto => lmstudio::ToolChoice::Auto,
 431                LanguageModelToolChoice::Any => lmstudio::ToolChoice::Required,
 432                LanguageModelToolChoice::None => lmstudio::ToolChoice::None,
 433            }),
 434        }
 435    }
 436
 437    fn stream_completion(
 438        &self,
 439        request: lmstudio::ChatCompletionRequest,
 440        cx: &AsyncApp,
 441    ) -> BoxFuture<
 442        'static,
 443        Result<futures::stream::BoxStream<'static, Result<lmstudio::ResponseStreamEvent>>>,
 444    > {
 445        let http_client = self.http_client.clone();
 446        let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
 447            let api_url = LmStudioLanguageModelProvider::api_url(cx);
 448            (state.api_key_state.key(&api_url), api_url)
 449        });
 450
 451        let future = self.request_limiter.stream(async move {
 452            let stream = lmstudio::stream_chat_completion(
 453                http_client.as_ref(),
 454                &api_url,
 455                api_key.as_deref(),
 456                request,
 457            )
 458            .await?;
 459            Ok(stream)
 460        });
 461
 462        async move { Ok(future.await?.boxed()) }.boxed()
 463    }
 464}
 465
 466impl LanguageModel for LmStudioLanguageModel {
 467    fn id(&self) -> LanguageModelId {
 468        self.id.clone()
 469    }
 470
 471    fn name(&self) -> LanguageModelName {
 472        LanguageModelName::from(self.model.display_name().to_string())
 473    }
 474
 475    fn provider_id(&self) -> LanguageModelProviderId {
 476        PROVIDER_ID
 477    }
 478
 479    fn provider_name(&self) -> LanguageModelProviderName {
 480        PROVIDER_NAME
 481    }
 482
 483    fn supports_tools(&self) -> bool {
 484        self.model.supports_tool_calls()
 485    }
 486
 487    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
 488        self.supports_tools()
 489            && match choice {
 490                LanguageModelToolChoice::Auto => true,
 491                LanguageModelToolChoice::Any => true,
 492                LanguageModelToolChoice::None => true,
 493            }
 494    }
 495
 496    fn supports_images(&self) -> bool {
 497        self.model.supports_images
 498    }
 499
 500    fn telemetry_id(&self) -> String {
 501        format!("lmstudio/{}", self.model.id())
 502    }
 503
 504    fn max_token_count(&self) -> u64 {
 505        self.model.max_token_count()
 506    }
 507
 508    fn stream_completion(
 509        &self,
 510        request: LanguageModelRequest,
 511        cx: &AsyncApp,
 512    ) -> BoxFuture<
 513        'static,
 514        Result<
 515            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
 516            LanguageModelCompletionError,
 517        >,
 518    > {
 519        let request = self.to_lmstudio_request(request);
 520        let completions = self.stream_completion(request, cx);
 521        async move {
 522            let mapper = LmStudioEventMapper::new();
 523            Ok(mapper.map_stream(completions.await?).boxed())
 524        }
 525        .boxed()
 526    }
 527}
 528
/// Accumulates streamed LM Studio response chunks into completion events.
struct LmStudioEventMapper {
    // Partial tool calls keyed by their stream index; argument JSON arrives
    // in fragments across chunks and is concatenated here.
    tool_calls_by_index: HashMap<usize, RawToolCall>,
}
 532
 533impl LmStudioEventMapper {
 534    fn new() -> Self {
 535        Self {
 536            tool_calls_by_index: HashMap::default(),
 537        }
 538    }
 539
 540    pub fn map_stream(
 541        mut self,
 542        events: Pin<Box<dyn Send + Stream<Item = Result<lmstudio::ResponseStreamEvent>>>>,
 543    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
 544    {
 545        events.flat_map(move |event| {
 546            futures::stream::iter(match event {
 547                Ok(event) => self.map_event(event),
 548                Err(error) => vec![Err(LanguageModelCompletionError::from(error))],
 549            })
 550        })
 551    }
 552
 553    pub fn map_event(
 554        &mut self,
 555        event: lmstudio::ResponseStreamEvent,
 556    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
 557        let Some(choice) = event.choices.into_iter().next() else {
 558            return vec![Err(LanguageModelCompletionError::from(anyhow!(
 559                "Response contained no choices"
 560            )))];
 561        };
 562
 563        let mut events = Vec::new();
 564        if let Some(content) = choice.delta.content {
 565            events.push(Ok(LanguageModelCompletionEvent::Text(content)));
 566        }
 567
 568        if let Some(reasoning_content) = choice.delta.reasoning_content {
 569            events.push(Ok(LanguageModelCompletionEvent::Thinking {
 570                text: reasoning_content,
 571                signature: None,
 572            }));
 573        }
 574
 575        if let Some(tool_calls) = choice.delta.tool_calls {
 576            for tool_call in tool_calls {
 577                let entry = self.tool_calls_by_index.entry(tool_call.index).or_default();
 578
 579                if let Some(tool_id) = tool_call.id {
 580                    entry.id = tool_id;
 581                }
 582
 583                if let Some(function) = tool_call.function {
 584                    if let Some(name) = function.name {
 585                        // At the time of writing this code LM Studio (0.3.15) is incompatible with the OpenAI API:
 586                        // 1. It sends function name in the first chunk
 587                        // 2. It sends empty string in the function name field in all subsequent chunks for arguments
 588                        // According to https://platform.openai.com/docs/guides/function-calling?api-mode=responses#streaming
 589                        // function name field should be sent only inside the first chunk.
 590                        if !name.is_empty() {
 591                            entry.name = name;
 592                        }
 593                    }
 594
 595                    if let Some(arguments) = function.arguments {
 596                        entry.arguments.push_str(&arguments);
 597                    }
 598                }
 599            }
 600        }
 601
 602        if let Some(usage) = event.usage {
 603            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
 604                input_tokens: usage.prompt_tokens,
 605                output_tokens: usage.completion_tokens,
 606                cache_creation_input_tokens: 0,
 607                cache_read_input_tokens: 0,
 608            })));
 609        }
 610
 611        match choice.finish_reason.as_deref() {
 612            Some("stop") => {
 613                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
 614            }
 615            Some("tool_calls") => {
 616                events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
 617                    match parse_tool_arguments(&tool_call.arguments) {
 618                        Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
 619                            LanguageModelToolUse {
 620                                id: tool_call.id.into(),
 621                                name: tool_call.name.into(),
 622                                is_input_complete: true,
 623                                input,
 624                                raw_input: tool_call.arguments,
 625                                thought_signature: None,
 626                            },
 627                        )),
 628                        Err(error) => Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
 629                            id: tool_call.id.into(),
 630                            tool_name: tool_call.name.into(),
 631                            raw_input: tool_call.arguments.into(),
 632                            json_parse_error: error.to_string(),
 633                        }),
 634                    }
 635                }));
 636
 637                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
 638            }
 639            Some(stop_reason) => {
 640                log::error!("Unexpected LMStudio stop_reason: {stop_reason:?}",);
 641                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
 642            }
 643            None => {}
 644        }
 645
 646        events
 647    }
 648}
 649
/// A tool call being assembled from streamed fragments.
#[derive(Default)]
struct RawToolCall {
    id: String,
    name: String,
    // JSON argument text, accumulated fragment-by-fragment across chunks.
    arguments: String,
}
 656
 657fn add_message_content_part(
 658    new_part: lmstudio::MessagePart,
 659    role: Role,
 660    messages: &mut Vec<lmstudio::ChatMessage>,
 661) {
 662    match (role, messages.last_mut()) {
 663        (Role::User, Some(lmstudio::ChatMessage::User { content }))
 664        | (
 665            Role::Assistant,
 666            Some(lmstudio::ChatMessage::Assistant {
 667                content: Some(content),
 668                ..
 669            }),
 670        )
 671        | (Role::System, Some(lmstudio::ChatMessage::System { content })) => {
 672            content.push_part(new_part);
 673        }
 674        _ => {
 675            messages.push(match role {
 676                Role::User => lmstudio::ChatMessage::User {
 677                    content: lmstudio::MessageContent::from(vec![new_part]),
 678                },
 679                Role::Assistant => lmstudio::ChatMessage::Assistant {
 680                    content: Some(lmstudio::MessageContent::from(vec![new_part])),
 681                    tool_calls: Vec::new(),
 682                },
 683                Role::System => lmstudio::ChatMessage::System {
 684                    content: lmstudio::MessageContent::from(vec![new_part]),
 685                },
 686            });
 687        }
 688    }
 689}
 690
/// UI for configuring the LM Studio provider (API key and API URL).
struct ConfigurationView {
    state: Entity<State>,
    api_key_editor: Entity<InputField>,
    api_url_editor: Entity<InputField>,
}
 696
 697impl ConfigurationView {
 698    pub fn new(state: Entity<State>, _window: &mut Window, cx: &mut Context<Self>) -> Self {
 699        let api_key_editor = cx.new(|cx| InputField::new(_window, cx, "sk-...").label("API key"));
 700
 701        let api_url_editor = cx.new(|cx| {
 702            let input = InputField::new(_window, cx, LMSTUDIO_API_URL).label("API URL");
 703            input.set_text(&LmStudioLanguageModelProvider::api_url(cx), _window, cx);
 704            input
 705        });
 706
 707        cx.observe(&state, |_, _, cx| {
 708            cx.notify();
 709        })
 710        .detach();
 711
 712        Self {
 713            state,
 714            api_key_editor,
 715            api_url_editor,
 716        }
 717    }
 718
 719    fn retry_connection(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
 720        let has_api_url = LmStudioLanguageModelProvider::has_custom_url(cx);
 721        let has_api_key = self
 722            .state
 723            .read_with(cx, |state, _| state.api_key_state.has_key());
 724        if !has_api_url {
 725            self.save_api_url(cx);
 726        }
 727        if !has_api_key {
 728            self.save_api_key(&Default::default(), _window, cx);
 729        }
 730
 731        self.state.update(cx, |state, cx| {
 732            state.restart_fetch_models_task(cx);
 733        });
 734    }
 735
 736    fn save_api_key(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context<Self>) {
 737        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
 738        if api_key.is_empty() {
 739            return;
 740        }
 741
 742        self.api_key_editor
 743            .update(cx, |input, cx| input.set_text("", _window, cx));
 744
 745        let state = self.state.clone();
 746        cx.spawn_in(_window, async move |_, cx| {
 747            state
 748                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
 749                .await
 750        })
 751        .detach_and_log_err(cx);
 752    }
 753
 754    fn reset_api_key(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
 755        self.api_key_editor
 756            .update(cx, |input, cx| input.set_text("", _window, cx));
 757
 758        let state = self.state.clone();
 759        cx.spawn_in(_window, async move |_, cx| {
 760            state
 761                .update(cx, |state, cx| state.set_api_key(None, cx))
 762                .await
 763        })
 764        .detach_and_log_err(cx);
 765
 766        cx.notify();
 767    }
 768
 769    fn save_api_url(&self, cx: &mut Context<Self>) {
 770        let api_url = self.api_url_editor.read(cx).text(cx).trim().to_string();
 771        let current_url = LmStudioLanguageModelProvider::api_url(cx);
 772        if !api_url.is_empty() && &api_url != &current_url {
 773            self.state
 774                .update(cx, |state, cx| state.set_api_key(None, cx))
 775                .detach_and_log_err(cx);
 776
 777            let fs = <dyn Fs>::global(cx);
 778            update_settings_file(fs, cx, move |settings, _| {
 779                settings
 780                    .language_models
 781                    .get_or_insert_default()
 782                    .lmstudio
 783                    .get_or_insert_default()
 784                    .api_url = Some(api_url);
 785            });
 786        }
 787    }
 788
 789    fn reset_api_url(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
 790        self.api_url_editor
 791            .update(cx, |input, cx| input.set_text("", _window, cx));
 792
 793        // Clear API key when URL changes since keys are URL-specific
 794        self.state
 795            .update(cx, |state, cx| state.set_api_key(None, cx))
 796            .detach_and_log_err(cx);
 797
 798        let fs = <dyn Fs>::global(cx);
 799        update_settings_file(fs, cx, |settings, _cx| {
 800            if let Some(settings) = settings
 801                .language_models
 802                .as_mut()
 803                .and_then(|models| models.lmstudio.as_mut())
 804            {
 805                settings.api_url = Some(LMSTUDIO_API_URL.into());
 806            }
 807        });
 808        cx.notify();
 809    }
 810
    /// Renders the API URL section of the configuration panel.
    ///
    /// When a custom URL is set (anything other than `LMSTUDIO_API_URL`),
    /// shows a read-only card with the current URL and a "Reset API URL"
    /// button; otherwise shows the editable URL input, which is saved when
    /// the user confirms (`menu::Confirm`).
    fn render_api_url_editor(&self, cx: &Context<Self>) -> impl IntoElement {
        let api_url = LmStudioLanguageModelProvider::api_url(cx);
        let custom_api_url_set = api_url != LMSTUDIO_API_URL;

        if custom_api_url_set {
            // Configured state: display the active URL with a reset affordance.
            h_flex()
                .p_3()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().elevated_surface_background)
                .child(
                    h_flex()
                        .gap_2()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(v_flex().gap_1().child(Label::new(api_url))),
                )
                .child(
                    Button::new("reset-api-url", "Reset API URL")
                        .label_size(LabelSize::Small)
                        .start_icon(Icon::new(IconName::Undo).size(IconSize::Small))
                        .layer(ElevationIndex::ModalSurface)
                        .on_click(
                            cx.listener(|this, _, _window, cx| this.reset_api_url(_window, cx)),
                        ),
                )
                .into_any_element()
        } else {
            // Default state: show the editable input; Enter persists the URL.
            v_flex()
                .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| {
                    this.save_api_url(cx);
                    cx.notify();
                }))
                .gap_2()
                .child(self.api_url_editor.clone())
                .into_any_element()
        }
    }
 850
    /// Renders the API key section of the configuration panel.
    ///
    /// With no key stored, shows the key input plus a hint about the
    /// `LMSTUDIO_API_KEY` environment variable; saving is wired to
    /// `Self::save_api_key`. With a key present, shows a configured card
    /// whose reset action is disabled when the key came from the environment
    /// variable, since it can only be unset outside the app.
    fn render_api_key_editor(&self, cx: &Context<Self>) -> impl IntoElement {
        let state = self.state.read(cx);
        let env_var_set = state.api_key_state.is_from_env_var();
        let configured_card_label = if env_var_set {
            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable.")
        } else {
            "API key configured".to_string()
        };

        if !state.api_key_state.has_key() {
            // No key yet: editable input + env-var hint.
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(self.api_key_editor.clone())
                .child(
                    Label::new(format!(
                        "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed."
                    ))
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any_element()
        } else {
            // Key configured: card click resets it, unless the key came from
            // the environment (reset disabled; tooltip explains why).
            ConfiguredApiCard::new(configured_card_label)
                .disabled(env_var_set)
                .on_click(cx.listener(|this, _, _window, cx| this.reset_api_key(_window, cx)))
                .when(env_var_set, |this| {
                    this.tooltip_label(format!(
                        "To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable."
                    ))
                })
                .into_any_element()
        }
    }
 884}
 885
impl Render for ConfigurationView {
    /// Builds the full LM Studio configuration panel: intro text, the API
    /// URL and API key editors, external links, and a connection status /
    /// connect control that varies with authentication state.
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let is_authenticated = self.state.read(cx).is_authenticated();

        v_flex()
            .gap_2()
            .child(
                // Intro copy: what LM Studio is and how to get a first model.
                v_flex()
                    .gap_1()
                    .child(Label::new("Run local LLMs like Llama, Phi, and Qwen."))
                    .child(
                        List::new()
                            .child(ListBulletItem::new(
                                "LM Studio needs to be running with at least one model downloaded.",
                            ))
                            .child(
                                ListBulletItem::new("")
                                    .child(Label::new("To get your first model, try running"))
                                    .child(Label::new("lms get qwen2.5-coder-7b").inline_code(cx)),
                            ),
                    )
                    .child(Label::new(
                        "Alternatively, you can connect to an LM Studio server by specifying its \
                        URL and API key (may not be required):",
                    )),
            )
            .child(self.render_api_url_editor(cx))
            .child(self.render_api_key_editor(cx))
            .child(
                // Footer row: external links on the left, connection
                // status / connect button on the right.
                h_flex()
                    .w_full()
                    .justify_between()
                    .gap_2()
                    .child(
                        h_flex()
                            .w_full()
                            .gap_2()
                            .map(|this| {
                                // Connected users get a link to the LM Studio
                                // site; everyone else gets a download link.
                                if is_authenticated {
                                    this.child(
                                        Button::new("lmstudio-site", "LM Studio")
                                            .style(ButtonStyle::Subtle)
                                            .end_icon(
                                                Icon::new(IconName::ArrowUpRight)
                                                    .size(IconSize::Small)
                                                    .color(Color::Muted),
                                            )
                                            .on_click(move |_, _window, cx| {
                                                cx.open_url(LMSTUDIO_SITE)
                                            })
                                            .into_any_element(),
                                    )
                                } else {
                                    this.child(
                                        Button::new(
                                            "download_lmstudio_button",
                                            "Download LM Studio",
                                        )
                                        .style(ButtonStyle::Subtle)
                                        .end_icon(
                                            Icon::new(IconName::ArrowUpRight)
                                                .size(IconSize::Small)
                                                .color(Color::Muted),
                                        )
                                        .on_click(move |_, _window, cx| {
                                            cx.open_url(LMSTUDIO_DOWNLOAD_URL)
                                        })
                                        .into_any_element(),
                                    )
                                }
                            })
                            .child(
                                Button::new("view-models", "Model Catalog")
                                    .style(ButtonStyle::Subtle)
                                    .end_icon(
                                        Icon::new(IconName::ArrowUpRight)
                                            .size(IconSize::Small)
                                            .color(Color::Muted),
                                    )
                                    .on_click(move |_, _window, cx| {
                                        cx.open_url(LMSTUDIO_CATALOG_URL)
                                    }),
                            ),
                    )
                    .map(|this| {
                        // Right side: "Connected" badge with a model-refresh
                        // button when authenticated, otherwise a Connect
                        // button that retries the connection.
                        if is_authenticated {
                            this.child(
                                ButtonLike::new("connected")
                                    .disabled(true)
                                    .cursor_style(CursorStyle::Arrow)
                                    .child(
                                        h_flex()
                                            .gap_2()
                                            .child(Icon::new(IconName::Check).color(Color::Success))
                                            .child(Label::new("Connected"))
                                            .into_any_element(),
                                    )
                                    .child(
                                        IconButton::new("refresh-models", IconName::RotateCcw)
                                            .tooltip(Tooltip::text("Refresh Models"))
                                            .on_click(cx.listener(|this, _, _window, cx| {
                                                // Drop the cached model list so the retry
                                                // re-fetches it from the server.
                                                this.state.update(cx, |state, _| {
                                                    state.available_models.clear();
                                                });
                                                this.retry_connection(_window, cx);
                                            })),
                                    ),
                            )
                        } else {
                            this.child(
                                Button::new("retry_lmstudio_models", "Connect")
                                    .start_icon(
                                        Icon::new(IconName::PlayFilled).size(IconSize::XSmall),
                                    )
                                    .on_click(cx.listener(move |this, _, _window, cx| {
                                        this.retry_connection(_window, cx)
                                    })),
                            )
                        }
                    }),
            )
    }
}