assistant_settings.rs

use std::sync::Arc;

use anthropic::Model as AnthropicModel;
use fs::Fs;
use gpui::{AppContext, Pixels};
use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
use ollama::Model as OllamaModel;
use open_ai::Model as OpenAiModel;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

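/// Legacy (version 1) per-provider configuration for the assistant.
///
/// Retained so that older settings files can still be deserialized and
/// migrated to the version 2 format and to `AllLanguageModelSettings`.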
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
}

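/// The resolved assistant panel settings, produced by merging the default
/// settings with any user customizations.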
#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: AssistantDefaultModel,
    pub using_outdated_settings_version: bool,
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}

impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl Default for AssistantSettingsContent {
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}

impl AssistantSettingsContent {
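    /// Returns true if the settings were written in the legacy or version 1
    /// format and should be migrated to version 2.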
    pub fn is_version_outdated(&self) -> bool {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            AssistantSettingsContent::Legacy(_) => true,
        }
    }

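    /// Migrates outdated settings in place: version 1 provider configuration is
    /// copied into `AllLanguageModelSettings` (only if that provider is not
    /// already configured there), and `self` is rewritten as version 2.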
    pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
        if let AssistantSettingsContent::Versioned(settings) = self {
            if let VersionedAssistantSettingsContent::V1(settings) = settings {
                if let Some(provider) = settings.provider.clone() {
                    match provider {
                        AssistantProviderContentV1::Anthropic {
                            api_url,
                            low_speed_timeout_in_seconds,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.anthropic.is_none() {
                                    content.anthropic =
                                        Some(language_model::settings::AnthropicSettingsContent {
                                            api_url,
                                            low_speed_timeout_in_seconds,
                                            ..Default::default()
                                        });
                                }
                            },
                        ),
                        AssistantProviderContentV1::Ollama {
                            api_url,
                            low_speed_timeout_in_seconds,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.ollama.is_none() {
                                    content.ollama =
                                        Some(language_model::settings::OllamaSettingsContent {
                                            api_url,
                                            low_speed_timeout_in_seconds,
                                        });
                                }
                            },
                        ),
                        AssistantProviderContentV1::OpenAi {
                            api_url,
                            low_speed_timeout_in_seconds,
                            available_models,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.openai.is_none() {
                                    content.openai =
                                        Some(language_model::settings::OpenAiSettingsContent {
                                            api_url,
                                            low_speed_timeout_in_seconds,
                                            available_models,
                                        });
                                }
                            },
                        ),
                        _ => {}
                    }
                }
            }
        }

        *self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
            self.upgrade(),
        ));
    }

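    /// Converts any supported settings format into the version 2 content,
    /// without modifying `self`.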
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| AssistantDefaultModel {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| AssistantDefaultModel {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| AssistantDefaultModel {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| AssistantDefaultModel {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                },
                VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
            },
            AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(AssistantDefaultModel {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
            },
        }
    }

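    /// Sets the dock position in whichever settings format is currently in use.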
    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.dock = Some(dock);
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                settings.dock = Some(dock);
            }
        }
    }

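    /// Sets the default model, mapping the language model's provider id onto the
    /// representation used by the current settings version and preserving any
    /// existing provider configuration (API URL, timeout, available models).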
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
                    "zed.dev" => {
                        settings.provider = Some(AssistantProviderContentV1::ZedDotDev {
                            default_model: CloudModel::from_id(&model).ok(),
                        });
                    }
                    "anthropic" => {
                        let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
                            Some(AssistantProviderContentV1::Anthropic {
                                api_url,
                                low_speed_timeout_in_seconds,
                                ..
                            }) => (api_url.clone(), *low_speed_timeout_in_seconds),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::Anthropic {
                            default_model: AnthropicModel::from_id(&model).ok(),
                            api_url,
                            low_speed_timeout_in_seconds,
                        });
                    }
                    "ollama" => {
                        let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
                            Some(AssistantProviderContentV1::Ollama {
                                api_url,
                                low_speed_timeout_in_seconds,
                                ..
                            }) => (api_url.clone(), *low_speed_timeout_in_seconds),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::Ollama {
                            default_model: Some(ollama::Model::new(&model)),
                            api_url,
                            low_speed_timeout_in_seconds,
                        });
                    }
                    "openai" => {
                        let (api_url, low_speed_timeout_in_seconds, available_models) =
                            match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    low_speed_timeout_in_seconds,
                                    available_models,
                                    ..
                                }) => (
                                    api_url.clone(),
                                    *low_speed_timeout_in_seconds,
                                    available_models.clone(),
                                ),
                                _ => (None, None, None),
                            };
                        settings.provider = Some(AssistantProviderContentV1::OpenAi {
                            default_model: open_ai::Model::from_id(&model).ok(),
                            api_url,
                            low_speed_timeout_in_seconds,
                            available_models,
                        });
                    }
                    _ => {}
                },
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_model = Some(AssistantDefaultModel { provider, model });
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                if let Ok(model) = open_ai::Model::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
        }
    }
}

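/// The on-disk assistant settings, tagged with a `version` field so that older
/// formats can still be read.
///
/// For illustration, a version 2 `"assistant"` section of the settings file
/// might look roughly like this (the values shown are just the documented
/// defaults, not required):
///
/// ```json
/// {
///     "assistant": {
///         "version": "2",
///         "enabled": true,
///         "button": true,
///         "dock": "right",
///         "default_width": 640,
///         "default_height": 320,
///         "default_model": { "provider": "openai", "model": "gpt-4" }
///     }
/// }
/// ```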
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new contexts.
    default_model: Option<AssistantDefaultModel>,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct AssistantDefaultModel {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}

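/// Restricts the JSON schema for `AssistantDefaultModel::provider` to the known
/// provider names.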
fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for AssistantDefaultModel {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be the internal `zed.dev` service or an external provider such as
    /// `openai`, `anthropic`, or `ollama`, each with its own default model and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new contexts.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new contexts.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}

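// Registered under the "assistant" key of the settings files. `load` merges the
// defaults and user customizations in order, upgrading any outdated formats.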
impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(
                &mut settings.default_model,
                value.default_model.map(Into::into),
            );
        }

        Ok(settings)
    }
}

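/// Overwrites `target` with `value`, if one was provided.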
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

// #[cfg(test)]
// mod tests {
//     use gpui::{AppContext, UpdateGlobal};
//     use settings::SettingsStore;

//     use super::*;

//     #[gpui::test]
//     fn test_deserialize_assistant_settings(cx: &mut AppContext) {
//         let store = settings::SettingsStore::test(cx);
//         cx.set_global(store);

//         // Settings default to gpt-4-turbo.
//         AssistantSettings::register(cx);
//         assert_eq!(
//             AssistantSettings::get_global(cx).provider,
//             AssistantProvider::OpenAi {
//                 model: OpenAiModel::FourOmni,
//                 api_url: open_ai::OPEN_AI_API_URL.into(),
//                 low_speed_timeout_in_seconds: None,
//                 available_models: Default::default(),
//             }
//         );

//         // Ensure backward-compatibility.
//         SettingsStore::update_global(cx, |store, cx| {
//             store
//                 .set_user_settings(
//                     r#"{
//                         "assistant": {
//                             "openai_api_url": "test-url",
//                         }
//                     }"#,
//                     cx,
//                 )
//                 .unwrap();
//         });
//         assert_eq!(
//             AssistantSettings::get_global(cx).provider,
//             AssistantProvider::OpenAi {
//                 model: OpenAiModel::FourOmni,
//                 api_url: "test-url".into(),
//                 low_speed_timeout_in_seconds: None,
//                 available_models: Default::default(),
//             }
//         );
//         SettingsStore::update_global(cx, |store, cx| {
//             store
//                 .set_user_settings(
//                     r#"{
//                         "assistant": {
//                             "default_open_ai_model": "gpt-4-0613"
//                         }
//                     }"#,
//                     cx,
//                 )
//                 .unwrap();
//         });
//         assert_eq!(
//             AssistantSettings::get_global(cx).provider,
//             AssistantProvider::OpenAi {
//                 model: OpenAiModel::Four,
//                 api_url: open_ai::OPEN_AI_API_URL.into(),
//                 low_speed_timeout_in_seconds: None,
//                 available_models: Default::default(),
//             }
//         );

//         // The new version supports setting a custom model when using zed.dev.
//         SettingsStore::update_global(cx, |store, cx| {
//             store
//                 .set_user_settings(
//                     r#"{
//                         "assistant": {
//                             "version": "1",
//                             "provider": {
//                                 "name": "zed.dev",
//                                 "default_model": {
//                                     "custom": {
//                                         "name": "custom-provider"
//                                     }
//                                 }
//                             }
//                         }
//                     }"#,
//                     cx,
//                 )
//                 .unwrap();
//         });
//         assert_eq!(
//             AssistantSettings::get_global(cx).provider,
//             AssistantProvider::ZedDotDev {
//                 model: CloudModel::Custom {
//                     name: "custom-provider".into(),
//                     max_tokens: None
//                 }
//             }
//         );
//     }
// }