// settings.rs

  1use std::sync::Arc;
  2
  3use anyhow::Result;
  4use gpui::App;
  5use language_model::LanguageModelCacheConfiguration;
  6use project::Fs;
  7use schemars::JsonSchema;
  8use serde::{Deserialize, Serialize};
  9use settings::{update_settings_file, Settings, SettingsSources};
 10
 11use crate::provider::{
 12    self,
 13    anthropic::AnthropicSettings,
 14    cloud::{self, ZedDotDevSettings},
 15    copilot_chat::CopilotChatSettings,
 16    deepseek::DeepSeekSettings,
 17    google::GoogleSettings,
 18    lmstudio::LmStudioSettings,
 19    ollama::OllamaSettings,
 20    open_ai::OpenAiSettings,
 21};
 22
 23/// Initializes the language model settings.
 24pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
 25    AllLanguageModelSettings::register(cx);
 26
 27    if AllLanguageModelSettings::get_global(cx)
 28        .openai
 29        .needs_setting_migration
 30    {
 31        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
 32            if let Some(settings) = setting.openai.clone() {
 33                let (newest_version, _) = settings.upgrade();
 34                setting.openai = Some(OpenAiSettingsContent::Versioned(
 35                    VersionedOpenAiSettingsContent::V1(newest_version),
 36                ));
 37            }
 38        });
 39    }
 40
 41    if AllLanguageModelSettings::get_global(cx)
 42        .anthropic
 43        .needs_setting_migration
 44    {
 45        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
 46            if let Some(settings) = setting.anthropic.clone() {
 47                let (newest_version, _) = settings.upgrade();
 48                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
 49                    VersionedAnthropicSettingsContent::V1(newest_version),
 50                ));
 51            }
 52        });
 53    }
 54}
 55
/// Fully resolved settings for every supported language model provider,
/// produced by merging all settings sources in `Settings::load`.
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
    pub lmstudio: LmStudioSettings,
    pub deepseek: DeepSeekSettings,
}
 67
/// On-disk representation of the `language_models` settings section.
/// Every provider entry is optional; an absent entry leaves the merged
/// settings for that provider untouched.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub lmstudio: Option<LmStudioSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    // Serialized under the "zed.dev" key, which is not a valid Rust identifier.
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub deepseek: Option<DeepseekSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}
 80
/// Anthropic settings as written in the settings file.
///
/// `#[serde(untagged)]` lets deserialization accept either the legacy
/// (unversioned) shape or the versioned shape; `upgrade` normalizes both.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}
 87
 88impl AnthropicSettingsContent {
 89    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
 90        match self {
 91            AnthropicSettingsContent::Legacy(content) => (
 92                AnthropicSettingsContentV1 {
 93                    api_url: content.api_url,
 94                    available_models: content.available_models.map(|models| {
 95                        models
 96                            .into_iter()
 97                            .filter_map(|model| match model {
 98                                anthropic::Model::Custom {
 99                                    name,
100                                    display_name,
101                                    max_tokens,
102                                    tool_override,
103                                    cache_configuration,
104                                    max_output_tokens,
105                                    default_temperature,
106                                    extra_beta_headers,
107                                } => Some(provider::anthropic::AvailableModel {
108                                    name,
109                                    display_name,
110                                    max_tokens,
111                                    tool_override,
112                                    cache_configuration: cache_configuration.as_ref().map(
113                                        |config| LanguageModelCacheConfiguration {
114                                            max_cache_anchors: config.max_cache_anchors,
115                                            should_speculate: config.should_speculate,
116                                            min_total_token: config.min_total_token,
117                                        },
118                                    ),
119                                    max_output_tokens,
120                                    default_temperature,
121                                    extra_beta_headers,
122                                }),
123                                _ => None,
124                            })
125                            .collect()
126                    }),
127                },
128                true,
129            ),
130            AnthropicSettingsContent::Versioned(content) => match content {
131                VersionedAnthropicSettingsContent::V1(content) => (content, false),
132            },
133        }
134    }
135}
136
/// Legacy (pre-versioning) Anthropic settings shape, kept for backward
/// compatibility with older settings files.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    // Uses the `anthropic` crate's model type rather than the provider's.
    pub available_models: Option<Vec<anthropic::Model>>,
}
142
/// Versioned Anthropic settings, tagged in the settings file with a
/// `"version"` key (e.g. `"version": "1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}
149
/// Version 1 of the Anthropic settings content.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}
155
/// Settings file content for the Ollama provider (unversioned).
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}
161
/// Settings file content for the LM Studio provider (unversioned).
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LmStudioSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::lmstudio::AvailableModel>>,
}
167
/// Settings file content for the DeepSeek provider (unversioned).
// NOTE(review): casing differs from `DeepSeekSettings` ("Deepseek" vs
// "DeepSeek"); renaming would break the public API, so it is only flagged.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct DeepseekSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::deepseek::AvailableModel>>,
}
173
/// OpenAI settings as written in the settings file.
///
/// `#[serde(untagged)]` lets deserialization accept either the legacy
/// (unversioned) shape or the versioned shape; `upgrade` normalizes both.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}
180
181impl OpenAiSettingsContent {
182    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
183        match self {
184            OpenAiSettingsContent::Legacy(content) => (
185                OpenAiSettingsContentV1 {
186                    api_url: content.api_url,
187                    available_models: content.available_models.map(|models| {
188                        models
189                            .into_iter()
190                            .filter_map(|model| match model {
191                                open_ai::Model::Custom {
192                                    name,
193                                    display_name,
194                                    max_tokens,
195                                    max_output_tokens,
196                                    max_completion_tokens,
197                                } => Some(provider::open_ai::AvailableModel {
198                                    name,
199                                    max_tokens,
200                                    max_output_tokens,
201                                    display_name,
202                                    max_completion_tokens,
203                                }),
204                                _ => None,
205                            })
206                            .collect()
207                    }),
208                },
209                true,
210            ),
211            OpenAiSettingsContent::Versioned(content) => match content {
212                VersionedOpenAiSettingsContent::V1(content) => (content, false),
213            },
214        }
215    }
216}
217
/// Legacy (pre-versioning) OpenAI settings shape, kept for backward
/// compatibility with older settings files.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    // Uses the `open_ai` crate's model type rather than the provider's.
    pub available_models: Option<Vec<open_ai::Model>>,
}
223
/// Versioned OpenAI settings, tagged in the settings file with a
/// `"version"` key (e.g. `"version": "1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}
230
/// Version 1 of the OpenAI settings content.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}
236
/// Settings file content for the Google provider (unversioned).
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}
242
/// Settings file content for the hosted zed.dev provider.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    // NOTE(review): unlike the other content structs, this field is not
    // `pub` — confirm whether that is intentional.
    available_models: Option<Vec<cloud::AvailableModel>>,
}
247
/// Settings file content for GitHub Copilot Chat; currently has no
/// configurable fields.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {}
250
251impl settings::Settings for AllLanguageModelSettings {
252    const KEY: Option<&'static str> = Some("language_models");
253
254    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
255
256    type FileContent = AllLanguageModelSettingsContent;
257
258    fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
259        fn merge<T>(target: &mut T, value: Option<T>) {
260            if let Some(value) = value {
261                *target = value;
262            }
263        }
264
265        let mut settings = AllLanguageModelSettings::default();
266
267        for value in sources.defaults_and_customizations() {
268            // Anthropic
269            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
270                Some((content, upgraded)) => (Some(content), upgraded),
271                None => (None, false),
272            };
273
274            if upgraded {
275                settings.anthropic.needs_setting_migration = true;
276            }
277
278            merge(
279                &mut settings.anthropic.api_url,
280                anthropic.as_ref().and_then(|s| s.api_url.clone()),
281            );
282            merge(
283                &mut settings.anthropic.available_models,
284                anthropic.as_ref().and_then(|s| s.available_models.clone()),
285            );
286
287            // Ollama
288            let ollama = value.ollama.clone();
289
290            merge(
291                &mut settings.ollama.api_url,
292                value.ollama.as_ref().and_then(|s| s.api_url.clone()),
293            );
294            merge(
295                &mut settings.ollama.available_models,
296                ollama.as_ref().and_then(|s| s.available_models.clone()),
297            );
298
299            // LM Studio
300            let lmstudio = value.lmstudio.clone();
301
302            merge(
303                &mut settings.lmstudio.api_url,
304                value.lmstudio.as_ref().and_then(|s| s.api_url.clone()),
305            );
306            merge(
307                &mut settings.lmstudio.available_models,
308                lmstudio.as_ref().and_then(|s| s.available_models.clone()),
309            );
310
311            // DeepSeek
312            let deepseek = value.deepseek.clone();
313
314            merge(
315                &mut settings.deepseek.api_url,
316                value.deepseek.as_ref().and_then(|s| s.api_url.clone()),
317            );
318            merge(
319                &mut settings.deepseek.available_models,
320                deepseek.as_ref().and_then(|s| s.available_models.clone()),
321            );
322
323            // OpenAI
324            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
325                Some((content, upgraded)) => (Some(content), upgraded),
326                None => (None, false),
327            };
328
329            if upgraded {
330                settings.openai.needs_setting_migration = true;
331            }
332
333            merge(
334                &mut settings.openai.api_url,
335                openai.as_ref().and_then(|s| s.api_url.clone()),
336            );
337            merge(
338                &mut settings.openai.available_models,
339                openai.as_ref().and_then(|s| s.available_models.clone()),
340            );
341            merge(
342                &mut settings.zed_dot_dev.available_models,
343                value
344                    .zed_dot_dev
345                    .as_ref()
346                    .and_then(|s| s.available_models.clone()),
347            );
348            merge(
349                &mut settings.google.api_url,
350                value.google.as_ref().and_then(|s| s.api_url.clone()),
351            );
352            merge(
353                &mut settings.google.available_models,
354                value
355                    .google
356                    .as_ref()
357                    .and_then(|s| s.available_models.clone()),
358            );
359        }
360
361        Ok(settings)
362    }
363}