use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use gpui::Pixels;
use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel;
use ollama::Model as OllamaModel;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

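/// Where the assistant panel is docked in the workspace.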
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

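/// Provider-specific configuration as stored by version 1 of the assistant settings.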
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
}

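/// The resolved assistant settings, produced by merging defaults and user customizations
/// in `Settings::load`.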
#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}

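// Schema generation is delegated to the versioned representation, so the generated JSON
// schema reflects the versioned settings format rather than the untagged enum.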
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl Default for AssistantSettingsContent {
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}

impl AssistantSettingsContent {
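    /// Returns true if the settings were written in a format older than the current
    /// version (V2), i.e. a V1 or legacy document.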
    pub fn is_version_outdated(&self) -> bool {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            AssistantSettingsContent::Legacy(_) => true,
        }
    }

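    /// Converts any supported settings format (legacy, V1, or V2) into the latest
    /// V2 representation.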
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::LmStudio { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "lmstudio".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    inline_alternatives: None,
                    enable_experimental_live_diffs: None,
                },
                VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
            },
            AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
            },
        }
    }

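    /// Sets the dock position, regardless of which settings version is on disk.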
    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.dock = Some(dock);
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                settings.dock = Some(dock);
            }
        }
    }

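    /// Records `language_model` as the default model. For V1 settings this is translated
    /// into the matching provider-specific configuration, preserving any previously
    /// configured API URL; legacy settings only accept OpenAI models.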
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
                    "zed.dev" => {
                        log::warn!("attempted to set zed.dev model on outdated settings");
                    }
                    "anthropic" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Anthropic {
                            default_model: AnthropicModel::from_id(&model).ok(),
                            api_url,
                        });
                    }
                    "ollama" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Ollama {
                            default_model: Some(ollama::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "lmstudio" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::LmStudio {
                            default_model: Some(lmstudio::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "openai" => {
                        let (api_url, available_models) = match &settings.provider {
                            Some(AssistantProviderContentV1::OpenAi {
                                api_url,
                                available_models,
                                ..
                            }) => (api_url.clone(), available_models.clone()),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::OpenAi {
                            default_model: OpenAiModel::from_id(&model).ok(),
                            api_url,
                            available_models,
                        });
                    }
                    _ => {}
                },
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_model = Some(LanguageModelSelection { provider, model });
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
        }
    }
}

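/// The versioned, on-disk representation of the assistant settings. The `version` field
/// in the settings file selects which variant is deserialized.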
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
}

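/// A provider and model name pair identifying which language model to use.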
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}

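// Limits the JSON schema for the `provider` field to the set of known provider names.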
fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "google".into(),
            "lmstudio".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
            "copilot_chat".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", or "zed.dev", each with
    /// their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}

impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

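    // Keep the `version` key when this settings section is written back to disk; the test
    // below asserts that `"version": "2"` is still present after an update.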
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
        }

        Ok(settings)
    }
}

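/// Overwrites `target` when `value` is `Some`, leaving it unchanged otherwise.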
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}