settings.rs
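//! Settings for the language model providers (Anthropic, OpenAI, Ollama, Google,
//! Zed.dev, and Copilot Chat), including migration of legacy, unversioned
//! provider settings to their versioned forms.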

use std::{sync::Arc, time::Duration};

use anyhow::Result;
use gpui::AppContext;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

use crate::{
    provider::{
        self,
        anthropic::AnthropicSettings,
        cloud::{self, ZedDotDevSettings},
        copilot_chat::CopilotChatSettings,
        google::GoogleSettings,
        ollama::OllamaSettings,
        open_ai::OpenAiSettings,
    },
    LanguageModelCacheConfiguration,
};

/// Initializes the language model settings.
pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
    AllLanguageModelSettings::register(cx);

    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}

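/// The fully-resolved, in-memory settings for every language model provider.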
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
}

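/// The `"language_models"` section as written in the settings file; every
/// provider entry is optional and is merged over the defaults in `load`.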
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}

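/// Anthropic settings as written on disk: either the legacy, unversioned shape
/// or a `version`-tagged shape. Deserialized untagged, so both are accepted.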
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}

impl AnthropicSettingsContent {
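    /// Converts these settings to the latest versioned form. The boolean is
    /// `true` when a legacy value was upgraded and the settings file still
    /// needs to be migrated.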
    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
        match self {
            AnthropicSettingsContent::Legacy(content) => (
                AnthropicSettingsContentV1 {
                    api_url: content.api_url,
                    low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                anthropic::Model::Custom {
                                    name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration,
                                } => Some(provider::anthropic::AvailableModel {
                                    name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration: cache_configuration.as_ref().map(
                                        |config| LanguageModelCacheConfiguration {
                                            max_cache_anchors: config.max_cache_anchors,
                                            should_speculate: config.should_speculate,
                                            min_total_token: config.min_total_token,
                                        },
                                    ),
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            AnthropicSettingsContent::Versioned(content) => match content {
                VersionedAnthropicSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

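/// The pre-versioning Anthropic settings shape, kept so existing settings
/// files can still be read and migrated.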
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<anthropic::Model>>,
}

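/// Anthropic settings tagged with an explicit `"version"` field (`"1"` today).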
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
}

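/// OpenAI settings as written on disk: either the legacy, unversioned shape or
/// a `version`-tagged shape.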
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}

impl OpenAiSettingsContent {
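    /// Same contract as [`AnthropicSettingsContent::upgrade`]: returns the
    /// latest versioned settings plus whether a migration is required.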
    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
        match self {
            OpenAiSettingsContent::Legacy(content) => (
                OpenAiSettingsContentV1 {
                    api_url: content.api_url,
                    low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                open_ai::Model::Custom { name, max_tokens } => {
                                    Some(provider::open_ai::AvailableModel { name, max_tokens })
                                }
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            OpenAiSettingsContent::Versioned(content) => match content {
                VersionedOpenAiSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

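/// The pre-versioning OpenAI settings shape, still accepted for migration.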
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<open_ai::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {
    low_speed_timeout_in_seconds: Option<u64>,
}

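// Illustrative `settings.json` entry read by this `Settings` implementation.
// The values below are hypothetical examples, not shipped defaults:
//
// "language_models": {
//     "openai": {
//         "version": "1",
//         "api_url": "https://api.openai.com/v1",
//         "low_speed_timeout_in_seconds": 600,
//         "available_models": [{ "name": "gpt-4o", "max_tokens": 128000 }]
//     },
//     "ollama": { "api_url": "http://localhost:11434" }
// }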
impl settings::Settings for AllLanguageModelSettings {
    const KEY: Option<&'static str> = Some("language_models");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

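        // Later sources override earlier ones field-by-field: built-in defaults
        // first, then user customizations.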
        for value in sources.defaults_and_customizations() {
            // Anthropic
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = anthropic
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.anthropic.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

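            // Ollama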
            merge(
                &mut settings.ollama.api_url,
                value.ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .ollama
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.ollama.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }

            // OpenAI
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) =
                openai.as_ref().and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.openai.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );

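            // Zed.dev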
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

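            // Google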
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .google
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.google.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

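            // Copilot Chat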
            if let Some(low_speed_timeout) = value
                .copilot_chat
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.copilot_chat.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout));
            }
        }

        Ok(settings)
    }
}