settings.rs

use std::{sync::Arc, time::Duration};

use anyhow::Result;
use gpui::AppContext;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

use crate::{
    provider::{
        self,
        anthropic::AnthropicSettings,
        cloud::{self, ZedDotDevSettings},
        copilot_chat::CopilotChatSettings,
        google::GoogleSettings,
        ollama::OllamaSettings,
        open_ai::OpenAiSettings,
    },
    LanguageModelCacheConfiguration,
};

/// Initializes the language model settings and migrates any legacy OpenAI and
/// Anthropic settings in the settings file to their current versioned formats.
pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
    AllLanguageModelSettings::register(cx);

    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}

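/// Resolved settings for each language model provider, built by merging the
/// serialized [`AllLanguageModelSettingsContent`] sources in `load`.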
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
}

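/// The serialized form of [`AllLanguageModelSettings`], as written under the
/// `language_models` key of the settings file. Every provider is optional, and
/// the `zed.dev` key deserializes into [`ZedDotDevSettingsContent`].
///
/// An illustrative settings fragment (the values are hypothetical placeholders,
/// not defaults):
///
/// ```json
/// {
///     "language_models": {
///         "openai": {
///             "version": "1",
///             "api_url": "https://api.openai.com/v1",
///             "low_speed_timeout_in_seconds": 600
///         },
///         "ollama": {
///             "api_url": "http://localhost:11434"
///         }
///     }
/// }
/// ```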
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}

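/// Anthropic settings as they may appear on disk: either the legacy flat shape
/// or the current versioned shape. `#[serde(untagged)]` lets serde accept both.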
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}

impl AnthropicSettingsContent {
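    /// Converts these settings into the latest versioned form. The second tuple
    /// field is `true` when a legacy value was upgraded (only `Custom` models are
    /// carried over) and `false` when the settings were already versioned.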
    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
        match self {
            AnthropicSettingsContent::Legacy(content) => (
                AnthropicSettingsContentV1 {
                    api_url: content.api_url,
                    low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                anthropic::Model::Custom {
                                    name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration,
                                    max_output_tokens,
                                } => Some(provider::anthropic::AvailableModel {
                                    name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration: cache_configuration.as_ref().map(
                                        |config| LanguageModelCacheConfiguration {
                                            max_cache_anchors: config.max_cache_anchors,
                                            should_speculate: config.should_speculate,
                                            min_total_token: config.min_total_token,
                                        },
                                    ),
                                    max_output_tokens,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            AnthropicSettingsContent::Versioned(content) => match content {
                VersionedAnthropicSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<anthropic::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
}

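/// OpenAI settings as they may appear on disk: either the legacy flat shape or
/// the current versioned shape. `#[serde(untagged)]` lets serde accept both.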
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}

impl OpenAiSettingsContent {
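    /// Converts these settings into the latest versioned form. The second tuple
    /// field is `true` when a legacy value was upgraded (only `Custom` models are
    /// carried over) and `false` when the settings were already versioned.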
    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
        match self {
            OpenAiSettingsContent::Legacy(content) => (
                OpenAiSettingsContentV1 {
                    api_url: content.api_url,
                    low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                open_ai::Model::Custom { name, max_tokens } => {
                                    Some(provider::open_ai::AvailableModel { name, max_tokens })
                                }
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            OpenAiSettingsContent::Versioned(content) => match content {
                VersionedOpenAiSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<open_ai::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {
    low_speed_timeout_in_seconds: Option<u64>,
}

impl settings::Settings for AllLanguageModelSettings {
    const KEY: Option<&'static str> = Some("language_models");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

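        // Each source is folded into `settings` in order; because `merge` only
        // overwrites when a value is present, later sources take precedence.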
        for value in sources.defaults_and_customizations() {
            // Anthropic
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = anthropic
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.anthropic.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

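            // Ollama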
            merge(
                &mut settings.ollama.api_url,
                value.ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .ollama
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.ollama.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }

            // OpenAI
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) =
                openai.as_ref().and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.openai.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );

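            // Zed.dev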
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

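            // Google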
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .google
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.google.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

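            // Copilot Chat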
            if let Some(low_speed_timeout) = value
                .copilot_chat
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.copilot_chat.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout));
            }
        }

        Ok(settings)
    }
}