1use std::sync::Arc;
2
3use ::open_ai::Model as OpenAiModel;
4use anthropic::Model as AnthropicModel;
5use feature_flags::FeatureFlagAppExt;
6use gpui::{AppContext, Pixels};
7use language_model::{CloudModel, LanguageModel};
8use lmstudio::Model as LmStudioModel;
9use ollama::Model as OllamaModel;
10use schemars::{schema::Schema, JsonSchema};
11use serde::{Deserialize, Serialize};
12use settings::{Settings, SettingsSources};
13
/// Which edge of the workspace the assistant panel is docked to.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    /// Docked to the left edge of the workspace.
    Left,
    /// Docked to the right edge of the workspace (the default).
    #[default]
    Right,
    /// Docked to the bottom of the workspace.
    Bottom,
}
22
23#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
24#[serde(tag = "name", rename_all = "snake_case")]
25pub enum AssistantProviderContentV1 {
26 #[serde(rename = "zed.dev")]
27 ZedDotDev { default_model: Option<CloudModel> },
28 #[serde(rename = "openai")]
29 OpenAi {
30 default_model: Option<OpenAiModel>,
31 api_url: Option<String>,
32 available_models: Option<Vec<OpenAiModel>>,
33 },
34 #[serde(rename = "anthropic")]
35 Anthropic {
36 default_model: Option<AnthropicModel>,
37 api_url: Option<String>,
38 },
39 #[serde(rename = "ollama")]
40 Ollama {
41 default_model: Option<OllamaModel>,
42 api_url: Option<String>,
43 },
44 LmStudio {
45 default_model: Option<LmStudioModel>,
46 api_url: Option<String>,
47 },
48}
49
/// Resolved assistant settings used at runtime, produced in
/// `Settings::load` by layering defaults and user customizations.
#[derive(Debug, Default)]
pub struct AssistantSettings {
    /// Whether the assistant is enabled.
    pub enabled: bool,
    /// Whether to show the assistant panel button in the status bar.
    pub button: bool,
    /// Which edge of the workspace the panel docks to.
    pub dock: AssistantDockPosition,
    /// Panel width when docked to the left or right.
    pub default_width: Pixels,
    /// Panel height when docked to the bottom.
    pub default_height: Pixels,
    /// Provider/model pair used when creating new chats.
    pub default_model: LanguageModelSelection,
    /// Additional models used to generate inline-assist alternatives.
    pub inline_alternatives: Vec<LanguageModelSelection>,
    /// True when any settings source was in an outdated (V1 or legacy) format.
    pub using_outdated_settings_version: bool,
    /// Opt-in flag for experimental live diffs in the assistant panel.
    pub enable_experimental_live_diffs: bool,
}
62
63impl AssistantSettings {
64 pub fn are_live_diffs_enabled(&self, cx: &AppContext) -> bool {
65 cx.is_staff() || self.enable_experimental_live_diffs
66 }
67}
68
/// Assistant panel settings
///
/// Deserialized untagged: serde first tries the versioned representation
/// (which carries a `"version"` field) and falls back to the legacy,
/// unversioned shape used by old settings files.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    /// Settings carrying an explicit `"version"` field ("1" or "2").
    Versioned(VersionedAssistantSettingsContent),
    /// Pre-versioning settings shape; upgraded to V2 on load.
    Legacy(LegacyAssistantSettingsContent),
}
76
// Delegate the JSON schema entirely to the versioned representation, so the
// legacy shape (kept only for reading old settings files) does not appear in
// the generated schema.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
90
91impl Default for AssistantSettingsContent {
92 fn default() -> Self {
93 Self::Versioned(VersionedAssistantSettingsContent::default())
94 }
95}
96
97impl AssistantSettingsContent {
98 pub fn is_version_outdated(&self) -> bool {
99 match self {
100 AssistantSettingsContent::Versioned(settings) => match settings {
101 VersionedAssistantSettingsContent::V1(_) => true,
102 VersionedAssistantSettingsContent::V2(_) => false,
103 },
104 AssistantSettingsContent::Legacy(_) => true,
105 }
106 }
107
108 fn upgrade(&self) -> AssistantSettingsContentV2 {
109 match self {
110 AssistantSettingsContent::Versioned(settings) => match settings {
111 VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
112 enabled: settings.enabled,
113 button: settings.button,
114 dock: settings.dock,
115 default_width: settings.default_width,
116 default_height: settings.default_width,
117 default_model: settings
118 .provider
119 .clone()
120 .and_then(|provider| match provider {
121 AssistantProviderContentV1::ZedDotDev { default_model } => {
122 default_model.map(|model| LanguageModelSelection {
123 provider: "zed.dev".to_string(),
124 model: model.id().to_string(),
125 })
126 }
127 AssistantProviderContentV1::OpenAi { default_model, .. } => {
128 default_model.map(|model| LanguageModelSelection {
129 provider: "openai".to_string(),
130 model: model.id().to_string(),
131 })
132 }
133 AssistantProviderContentV1::Anthropic { default_model, .. } => {
134 default_model.map(|model| LanguageModelSelection {
135 provider: "anthropic".to_string(),
136 model: model.id().to_string(),
137 })
138 }
139 AssistantProviderContentV1::Ollama { default_model, .. } => {
140 default_model.map(|model| LanguageModelSelection {
141 provider: "ollama".to_string(),
142 model: model.id().to_string(),
143 })
144 }
145 AssistantProviderContentV1::LmStudio { default_model, .. } => {
146 default_model.map(|model| LanguageModelSelection {
147 provider: "lmstudio".to_string(),
148 model: model.id().to_string(),
149 })
150 }
151 }),
152 inline_alternatives: None,
153 enable_experimental_live_diffs: None,
154 },
155 VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
156 },
157 AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
158 enabled: None,
159 button: settings.button,
160 dock: settings.dock,
161 default_width: settings.default_width,
162 default_height: settings.default_height,
163 default_model: Some(LanguageModelSelection {
164 provider: "openai".to_string(),
165 model: settings
166 .default_open_ai_model
167 .clone()
168 .unwrap_or_default()
169 .id()
170 .to_string(),
171 }),
172 inline_alternatives: None,
173 enable_experimental_live_diffs: None,
174 },
175 }
176 }
177
178 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
179 match self {
180 AssistantSettingsContent::Versioned(settings) => match settings {
181 VersionedAssistantSettingsContent::V1(settings) => {
182 settings.dock = Some(dock);
183 }
184 VersionedAssistantSettingsContent::V2(settings) => {
185 settings.dock = Some(dock);
186 }
187 },
188 AssistantSettingsContent::Legacy(settings) => {
189 settings.dock = Some(dock);
190 }
191 }
192 }
193
194 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
195 let model = language_model.id().0.to_string();
196 let provider = language_model.provider_id().0.to_string();
197
198 match self {
199 AssistantSettingsContent::Versioned(settings) => match settings {
200 VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
201 "zed.dev" => {
202 log::warn!("attempted to set zed.dev model on outdated settings");
203 }
204 "anthropic" => {
205 let api_url = match &settings.provider {
206 Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
207 api_url.clone()
208 }
209 _ => None,
210 };
211 settings.provider = Some(AssistantProviderContentV1::Anthropic {
212 default_model: AnthropicModel::from_id(&model).ok(),
213 api_url,
214 });
215 }
216 "ollama" => {
217 let api_url = match &settings.provider {
218 Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
219 api_url.clone()
220 }
221 _ => None,
222 };
223 settings.provider = Some(AssistantProviderContentV1::Ollama {
224 default_model: Some(ollama::Model::new(&model, None, None)),
225 api_url,
226 });
227 }
228 "lmstudio" => {
229 let api_url = match &settings.provider {
230 Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
231 api_url.clone()
232 }
233 _ => None,
234 };
235 settings.provider = Some(AssistantProviderContentV1::LmStudio {
236 default_model: Some(lmstudio::Model::new(&model, None, None)),
237 api_url,
238 });
239 }
240 "openai" => {
241 let (api_url, available_models) = match &settings.provider {
242 Some(AssistantProviderContentV1::OpenAi {
243 api_url,
244 available_models,
245 ..
246 }) => (api_url.clone(), available_models.clone()),
247 _ => (None, None),
248 };
249 settings.provider = Some(AssistantProviderContentV1::OpenAi {
250 default_model: OpenAiModel::from_id(&model).ok(),
251 api_url,
252 available_models,
253 });
254 }
255 _ => {}
256 },
257 VersionedAssistantSettingsContent::V2(settings) => {
258 settings.default_model = Some(LanguageModelSelection { provider, model });
259 }
260 },
261 AssistantSettingsContent::Legacy(settings) => {
262 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
263 settings.default_open_ai_model = Some(model);
264 }
265 }
266 }
267 }
268}
269
/// Versioned assistant settings content, discriminated by the `"version"`
/// field in JSON ("1" or "2").
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
278
279impl Default for VersionedAssistantSettingsContent {
280 fn default() -> Self {
281 Self::V2(AssistantSettingsContentV2 {
282 enabled: None,
283 button: None,
284 dock: None,
285 default_width: None,
286 default_height: None,
287 default_model: None,
288 inline_alternatives: None,
289 enable_experimental_live_diffs: None,
290 })
291 }
292}
293
/// Assistant settings content, version 2 (the current format).
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
}
325
/// A provider/model pair identifying a language model,
/// e.g. `{ "provider": "openai", "model": "gpt-4" }`.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    /// Id of the provider serving the model; the JSON schema restricts this
    /// to the known provider ids via `providers_schema`.
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    /// Provider-specific model identifier.
    pub model: String,
}
332
333fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
334 schemars::schema::SchemaObject {
335 enum_values: Some(vec![
336 "anthropic".into(),
337 "google".into(),
338 "ollama".into(),
339 "lmstudio".into(),
340 "openai".into(),
341 "zed.dev".into(),
342 "copilot_chat".into(),
343 ]),
344 ..Default::default()
345 }
346 .into()
347}
348
349impl Default for LanguageModelSelection {
350 fn default() -> Self {
351 Self {
352 provider: "openai".to_string(),
353 model: "gpt-4".to_string(),
354 }
355 }
356}
357
/// Assistant settings content, version 1 of the versioned format.
/// Upgraded to V2 on load via `AssistantSettingsContent::upgrade`.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
386
/// Assistant settings shape that predates the `"version"` field; kept so old
/// settings files still deserialize, and upgraded to V2 on load.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}
414
impl Settings for AssistantSettings {
    /// JSON key under which these settings live in settings files.
    const KEY: Option<&'static str> = Some("assistant");

    // Keep the "version" key when settings are written back out, even though
    // the resolved `AssistantSettings` struct does not carry it.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Builds the resolved settings by upgrading every source (defaults plus
    /// user customizations) to V2 and layering them; later sources win for
    /// any field they set.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Record that at least one source still uses an old format.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
        }

        Ok(settings)
    }
}
456
/// Overwrite `target` with `value` when one was provided; otherwise leave
/// `target` untouched.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(value) => *target = value,
        None => {}
    }
}
462
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    // End-to-end check that a V2 settings payload round-trips through the
    // settings file with its "version" key preserved.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        // Register assistant settings against a fresh test settings store.
        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // The built-in defaults are current-version and select zed.dev's model.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        // Write a V2 payload with a custom default model back to the file.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        // The serialized file must carry the preserved "version" key...
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        // ...and deserialize back into a non-outdated settings value.
        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}