settings.rs

use std::sync::Arc;

use anyhow::Result;
use gpui::App;
use language_model::LanguageModelCacheConfiguration;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

use crate::provider::{
    self,
    anthropic::AnthropicSettings,
    cloud::{self, ZedDotDevSettings},
    copilot_chat::CopilotChatSettings,
    deepseek::DeepSeekSettings,
    google::GoogleSettings,
    lmstudio::LmStudioSettings,
    mistral::MistralSettings,
    ollama::OllamaSettings,
    open_ai::OpenAiSettings,
};

/// Initializes the language model settings.
pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
    AllLanguageModelSettings::register(cx);

    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}

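/// Resolved settings for every supported language model provider.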
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
    pub lmstudio: LmStudioSettings,
    pub deepseek: DeepSeekSettings,
    pub mistral: MistralSettings,
}

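/// The `"language_models"` section of the settings file, as deserialized from JSON.
///
/// An illustrative (not exhaustive) `settings.json` entry, using only fields
/// defined in this module; the URLs are placeholders:
///
/// ```json
/// "language_models": {
///     "anthropic": { "version": "1", "api_url": "https://api.anthropic.com" },
///     "ollama": { "api_url": "http://localhost:11434" }
/// }
/// ```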
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub lmstudio: Option<LmStudioSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub deepseek: Option<DeepseekSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
    pub mistral: Option<MistralSettingsContent>,
}

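/// Anthropic settings as they may appear in the settings file: either the
/// legacy unversioned shape or the current versioned one.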
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}

impl AnthropicSettingsContent {
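    /// Converts these settings to the latest versioned form, returning `true`
    /// in the second tuple element when a legacy shape was migrated.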
    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
        match self {
            AnthropicSettingsContent::Legacy(content) => (
                AnthropicSettingsContentV1 {
                    api_url: content.api_url,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                anthropic::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration,
                                    max_output_tokens,
                                    default_temperature,
                                    extra_beta_headers,
                                } => Some(provider::anthropic::AvailableModel {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration: cache_configuration.as_ref().map(
                                        |config| LanguageModelCacheConfiguration {
                                            max_cache_anchors: config.max_cache_anchors,
                                            should_speculate: config.should_speculate,
                                            min_total_token: config.min_total_token,
                                        },
                                    ),
                                    max_output_tokens,
                                    default_temperature,
                                    extra_beta_headers,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            AnthropicSettingsContent::Versioned(content) => match content {
                VersionedAnthropicSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<anthropic::Model>>,
}

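/// Versioned Anthropic settings, tagged in JSON by a `"version"` field
/// (currently only `"1"`).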
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LmStudioSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::lmstudio::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct DeepseekSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::deepseek::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct MistralSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::mistral::AvailableModel>>,
}

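/// OpenAI settings as they may appear in the settings file: either the legacy
/// unversioned shape or the current versioned one.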
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}

impl OpenAiSettingsContent {
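    /// Converts these settings to the latest versioned form, returning `true`
    /// in the second tuple element when a legacy shape was migrated.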
    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
        match self {
            OpenAiSettingsContent::Legacy(content) => (
                OpenAiSettingsContentV1 {
                    api_url: content.api_url,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                open_ai::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    max_output_tokens,
                                    max_completion_tokens,
                                } => Some(provider::open_ai::AvailableModel {
                                    name,
                                    max_tokens,
                                    max_output_tokens,
                                    display_name,
                                    max_completion_tokens,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            OpenAiSettingsContent::Versioned(content) => match content {
                VersionedOpenAiSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<open_ai::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}

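/// Copilot Chat currently exposes no configurable settings in this file.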
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {}

impl settings::Settings for AllLanguageModelSettings {
    const KEY: Option<&'static str> = Some("language_models");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

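    // Settings are built up by layering: defaults first, then each customization
    // source in order, with later `Some` values overriding the targeted field.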
    fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

        for value in sources.defaults_and_customizations() {
            // Anthropic
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Ollama
            let ollama = value.ollama.clone();

            merge(
                &mut settings.ollama.api_url,
                ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.ollama.available_models,
                ollama.as_ref().and_then(|s| s.available_models.clone()),
            );

            // LM Studio
            let lmstudio = value.lmstudio.clone();

            merge(
                &mut settings.lmstudio.api_url,
                lmstudio.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.lmstudio.available_models,
                lmstudio.as_ref().and_then(|s| s.available_models.clone()),
            );

            // DeepSeek
            let deepseek = value.deepseek.clone();

            merge(
                &mut settings.deepseek.api_url,
                deepseek.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.deepseek.available_models,
                deepseek.as_ref().and_then(|s| s.available_models.clone()),
            );

            // OpenAI
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Zed.dev
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

            // Google
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

            // Mistral
            let mistral = value.mistral.clone();
            merge(
                &mut settings.mistral.api_url,
                mistral.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.mistral.available_models,
                mistral.as_ref().and_then(|s| s.available_models.clone()),
            );
        }

        Ok(settings)
    }
}